Compare commits

28 Commits: manager-ca...readme-for

| Author | SHA1 | Date |
|---|---|---|
| | e089896df9 | |
| | 0014eec124 | |
| | e0b3f3eb45 | |
| | 4bbc8594a7 | |
| | 3a377300e1 | |
| | 33a07e3a86 | |
| | 212cafc1d7 | |
| | 2643b3cbcc | |
| | d445229b6d | |
| | dab5c451b0 | |
| | 7bdf06131a | |
| | 854648d5af | |
| | c5f7b97359 | |
| | dd8a727ad6 | |
| | 6c627fe422 | |
| | ee980e1caf | |
| | 22bfaf6527 | |
| | 48ab48cc30 | |
| | a0b14d4127 | |
| | 03f9fe1a70 | |
| | 8915b8d796 | |
| | c77ffeeec0 | |
| | 4acf5660b2 | |
| | 2d9f0a668c | |
| | 9e6cb246cc | |
| | 14544ca63d | |
| | 26b347c04c | |
| | 36f75d1811 | |
```diff
@@ -1 +0,0 @@
-PYPI_TOKEN=your-pypi-token
```
`.github/workflows/ci.yml` (vendored, 70 lines, deleted)

@@ -1,70 +0,0 @@

```yaml
name: CI

on:
  push:
    branches: [ main, feat/*, fix/* ]
  pull_request:
    branches: [ main ]

jobs:
  validate-openapi:
    name: Validate OpenAPI Specification
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check if OpenAPI changed
        id: openapi-changed
        uses: tj-actions/changed-files@v44
        with:
          files: openapi.yaml

      - name: Setup Node.js
        if: steps.openapi-changed.outputs.any_changed == 'true'
        uses: actions/setup-node@v4
        with:
          node-version: '18'

      - name: Install Redoc CLI
        if: steps.openapi-changed.outputs.any_changed == 'true'
        run: |
          npm install -g @redocly/cli

      - name: Validate OpenAPI specification
        if: steps.openapi-changed.outputs.any_changed == 'true'
        run: |
          redocly lint openapi.yaml

  code-quality:
    name: Code Quality Checks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch all history for proper diff

      - name: Get changed Python files
        id: changed-py-files
        uses: tj-actions/changed-files@v44
        with:
          files: |
            **/*.py
          files_ignore: |
            comfyui_manager/legacy/**

      - name: Setup Python
        if: steps.changed-py-files.outputs.any_changed == 'true'
        uses: actions/setup-python@v5
        with:
          python-version: '3.9'

      - name: Install dependencies
        if: steps.changed-py-files.outputs.any_changed == 'true'
        run: |
          pip install ruff

      - name: Run ruff linting on changed files
        if: steps.changed-py-files.outputs.any_changed == 'true'
        run: |
          echo "Changed files: ${{ steps.changed-py-files.outputs.all_changed_files }}"
          echo "${{ steps.changed-py-files.outputs.all_changed_files }}" | xargs -r ruff check
```
`.github/workflows/publish-to-pypi.yml` (vendored, 32 lines changed)

```diff
@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - manager-v4
+      - draft-v4
     paths:
       - "pyproject.toml"

@@ -21,7 +21,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: '3.x'
+          python-version: '3.9'

       - name: Install build dependencies
         run: |
@@ -31,28 +31,28 @@ jobs:
       - name: Get current version
         id: current_version
         run: |
-          CURRENT_VERSION=$(grep -oP '^version = "\K[^"]+' pyproject.toml)
+          CURRENT_VERSION=$(grep -oP 'version = "\K[^"]+' pyproject.toml)
           echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
           echo "Current version: $CURRENT_VERSION"

       - name: Build package
         run: python -m build

-      # - name: Create GitHub Release
-      #   id: create_release
-      #   uses: softprops/action-gh-release@v2
-      #   env:
-      #     GITHUB_TOKEN: ${{ github.token }}
-      #   with:
-      #     files: dist/*
-      #     tag_name: v${{ steps.current_version.outputs.version }}
-      #     draft: false
-      #     prerelease: false
-      #     generate_release_notes: true
+      - name: Create GitHub Release
+        id: create_release
+        uses: softprops/action-gh-release@v2
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          files: dist/*
+          tag_name: v${{ steps.current_version.outputs.version }}
+          draft: false
+          prerelease: false
+          generate_release_notes: true

       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc
+        uses: pypa/gh-action-pypi-publish@release/v1
         with:
           password: ${{ secrets.PYPI_TOKEN }}
           skip-existing: true
           verbose: true
```
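For reference, a minimal Python sketch of the same version lookup the workflow does with `grep`; this is illustrative only (not part of the workflow), and the anchored regex below corresponds to the removed `^version = "` pattern:

```python
# Illustrative only: mirror the workflow's grep step in Python.
import re

def read_project_version(path: str = "pyproject.toml") -> str:
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            # Anchored form, equivalent to grep -oP '^version = "\K[^"]+'
            m = re.match(r'^version = "([^"]+)"', line)
            if m:
                return m.group(1)
    raise ValueError(f"no version field found in {path}")

if __name__ == "__main__":
    print(read_project_version())
```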
`.github/workflows/publish.yml` (vendored, 25 lines, new file)

@@ -0,0 +1,25 @@

```yaml
name: Publish to Comfy registry
on:
  workflow_dispatch:
  push:
    branches:
      - main-blocked
    paths:
      - "pyproject.toml"

permissions:
  issues: write

jobs:
  publish-node:
    name: Publish Custom Node to registry
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'ltdrdata' }}
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Publish Custom Node
        uses: Comfy-Org/publish-node-action@v1
        with:
          ## Add your own personal access token to your Github Repository secrets and reference it here.
          personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
```
Deleted workflow file (56 lines)

@@ -1,56 +0,0 @@

```yaml
# Example: GitHub Actions workflow to auto-update test durations
# Rename to .github/workflows/update-test-durations.yml to enable

name: Update Test Durations

on:
  schedule:
    # Run weekly on Sundays at 2 AM UTC
    - cron: '0 2 * * 0'
  workflow_dispatch:  # Allow manual trigger

jobs:
  update-durations:
    runs-on: self-hosted

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e .
          pip install pytest pytest-split

      - name: Update test durations
        run: |
          chmod +x tests/update_test_durations.sh
          ./tests/update_test_durations.sh

      - name: Check for changes
        id: check_changes
        run: |
          if git diff --quiet .test_durations; then
            echo "changed=false" >> $GITHUB_OUTPUT
          else
            echo "changed=true" >> $GITHUB_OUTPUT
          fi

      - name: Create Pull Request
        if: steps.check_changes.outputs.changed == 'true'
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'chore: update test duration data'
          title: 'Update test duration data'
          body: |
            Automated update of `.test_durations` file for optimal parallel test distribution.

            This ensures pytest-split can effectively balance test load across parallel environments.
          branch: auto/update-test-durations
          delete-branch: true
```
`.gitignore` (vendored, 13 lines changed)

```diff
@@ -17,15 +17,4 @@ github-stats-cache.json
 pip_overrides.json
 *.json
 check2.sh
 /venv/
-build
-dist
-*.egg-info
-.env
-.git
-.claude
-.hypothesis
-
-# Test artifacts
-tests/tmp/
-tests/env/
```
`CLAUDE.md` (170 lines, deleted)

@@ -1,170 +0,0 @@

# CLAUDE.md - Development Guidelines

## Project Context
This is ComfyUI Manager, a Python package that provides management functions for ComfyUI custom nodes, models, and extensions. The project follows modern Python packaging standards and maintains both current (`glob`) and legacy implementations.

## Code Architecture
- **Current Development**: Work in the `comfyui_manager/glob/` package
- **Legacy Code**: `comfyui_manager/legacy/` (reference only, do not modify unless explicitly asked)
- **Common Utilities**: `comfyui_manager/common/` for shared functionality
- **Data Models**: `comfyui_manager/data_models/` for API schemas and types

## Development Workflow for API Changes
When modifying data being sent or received:
1. Update the `openapi.yaml` file first
2. Verify YAML syntax using `yaml.safe_load` (see the sketch after this list)
3. Regenerate types following the `data_models/README.md` instructions
4. Verify new data model generation
5. Verify syntax of generated type files
6. Run formatting and linting on generated files
7. Update `__init__.py` files in `data_models` to export new models
8. Make changes to the rest of the codebase
9. Run CI tests to verify changes
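A minimal sketch of the step-2 syntax check, assuming only that PyYAML is installed and that `openapi.yaml` sits at the repository root (illustrative; not part of CLAUDE.md):

```python
# Illustrative only: fail fast if openapi.yaml is not valid YAML.
import sys
import yaml  # PyYAML

def check_openapi_syntax(path: str = "openapi.yaml") -> None:
    with open(path, "r", encoding="utf-8") as f:
        try:
            yaml.safe_load(f)
        except yaml.YAMLError as exc:
            print(f"YAML syntax error in {path}: {exc}", file=sys.stderr)
            raise SystemExit(1)
    print(f"{path}: YAML syntax OK")

if __name__ == "__main__":
    check_openapi_syntax()
```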
## Coding Standards

### Python Style
- Follow PEP 8 coding standards
- Use type hints for all function parameters and return values
- Target Python 3.9+ compatibility
- Line length: 120 characters (as configured in ruff)

### Security Guidelines
- Never hardcode API keys, tokens, or sensitive credentials
- Use environment variables for configuration
- Validate all user input and file paths
- Use prepared statements for database operations
- Implement proper error handling without exposing internal details
- Follow the principle of least privilege for file/network access

### Code Quality
- Write descriptive variable and function names
- Include docstrings for public functions and classes
- Handle exceptions gracefully with specific error messages
- Use logging instead of print statements for debugging
- Maintain test coverage for new functionality

## Dependencies & Tools

### Core Dependencies
- GitPython, PyGithub for Git operations
- typer, rich for the CLI interface
- transformers, huggingface-hub for AI model handling
- uv for fast package management

### Development Tools
- **Linting**: ruff (configured in pyproject.toml)
- **Testing**: pytest with coverage
- **Pre-commit**: pre-commit hooks for code quality
- **Type Checking**: Use type hints; consider mypy for strict checking

## File Organization
- Keep business logic in appropriate modules under `glob/`
- Place utility functions in `common/` for reusability
- Store UI/frontend code in the `js/` directory
- Maintain documentation in `docs/` with multilingual support

### Large Data Files Policy
- **NEVER read .json files directly** - These contain large datasets that cause unnecessary token consumption
- Use `JSON_REFERENCE.md` for understanding JSON file structures and schemas
- Work with processed/filtered data through APIs when possible
- For structure analysis, refer to data models in `comfyui_manager/data_models/` instead

## Git Workflow
- Work on feature branches, not main directly
- Write clear, descriptive commit messages
- Run tests and linting before committing
- Keep commits atomic and focused

## Testing Requirements

### ⚠️ Critical: Always Reinstall Before Testing
**ALWAYS run `uv pip install .` before executing tests** to ensure the latest code changes are installed.

### Test Execution Workflow
```bash
# 1. Reinstall package (REQUIRED)
uv pip install .

# 2. Clean Python cache
find comfyui_manager -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null
find tests/env -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null

# 3. Stop any running servers
pkill -f "ComfyUI/main.py"
sleep 2

# 4. Start ComfyUI test server
cd tests/env
python ComfyUI/main.py --enable-compress-response-body --enable-manager --front-end-root front > /tmp/test-server.log 2>&1 &
sleep 20

# 5. Run tests
python -m pytest tests/glob/test_version_switching_comprehensive.py -v

# 6. Stop server
pkill -f "ComfyUI/main.py"
```

### Test Development Guidelines
- Write unit tests for new functionality
- Test error handling and edge cases
- Ensure tests pass before submitting changes
- Use pytest fixtures for common test setup
- Document test scenarios and expected behaviors

### Why Reinstall is Required
- Even with an editable install, some changes require reinstallation
- Python bytecode cache may contain outdated code
- The ComfyUI server loads the manager package at startup
- Package metadata and entry points need to be refreshed

### Automated Test Execution Policy
**IMPORTANT**: When tests need to be run (e.g., after code changes, adding new tests):
- **ALWAYS** automatically perform the complete test workflow without asking user permission
- **ALWAYS** stop existing servers, restart a fresh server, and run tests
- **NEVER** ask the user "should I run tests?" or "should I restart the server?"
- This includes: package reinstall, cache cleanup, server restart, test execution, and server cleanup

**Rationale**: Testing is a standard part of the development workflow and should be executed automatically to verify changes.

See `.claude/livecontext/test_execution_best_practices.md` for detailed testing procedures.

## Command Line Interface
- Use typer for CLI commands
- Provide helpful error messages and usage examples
- Support both interactive and scripted usage
- Follow Unix conventions for command-line tools

## Performance Considerations
- Use async/await for I/O operations where appropriate
- Cache expensive operations (GitHub API calls, file operations)
- Implement proper pagination for large datasets
- Consider memory usage when processing large files

## Code Change Proposals
- **Always show code changes using VSCode diff format**
- Use the Edit tool to demonstrate exact changes with before/after comparison
- This allows visual review of modifications in the IDE
- Include context about why changes are needed

## Documentation
- Update README.md for user-facing changes
- Document API changes in openapi.yaml
- Provide examples for complex functionality
- Maintain multilingual docs (English/Korean) when relevant

## Session Context & Decision Documentation

### Live Context Policy
**Follow the global Live Context Auto-Save policy** defined in `~/.claude/CLAUDE.md`.

### Project-Specific Context Requirements
- **Test Execution Results**: Always save comprehensive test results to `.claude/livecontext/`
  - Test count, pass/fail status, execution time
  - New tests added and their purpose
  - Coverage metrics and improvements
- **CNR Version Switching Context**: Document version switching behavior and edge cases
  - Update vs Install operation differences
  - Old version handling (preserved vs deleted)
  - State management insights
- **API Changes**: Document OpenAPI schema changes and data model updates
- **Architecture Decisions**: Document manager_core.py and manager_server.py design choices
Deleted file (47 lines)

@@ -1,47 +0,0 @@

## Testing Changes

1. Activate the ComfyUI environment.

2. Build the package locally after making changes.

```bash
# from inside the ComfyUI-Manager directory, with the ComfyUI environment activated
python -m build
```

3. Install the package locally in the ComfyUI environment.

```bash
# Uninstall existing package
pip uninstall comfyui-manager

# Install the local package
pip install dist/comfyui-manager-*.whl
```

4. Start ComfyUI.

```bash
# after navigating to the ComfyUI directory
python main.py
```

## Manually Publish a Test Version to PyPI

1. Set the `PYPI_TOKEN` environment variable in the env file.

2. If manually publishing, you likely want to use a release candidate version, so set the version in [pyproject.toml](pyproject.toml) to something like `0.0.1rc1`.

3. Build the package.

```bash
python -m build
```

4. Upload the package to PyPI.

```bash
python -m twine upload dist/* --username __token__ --password $PYPI_TOKEN
```

5. View at https://pypi.org/project/comfyui-manager/
`DOCUMENTATION_INDEX.md` (187 lines, deleted)

@@ -1,187 +0,0 @@

# ComfyUI Manager Documentation Index

**Last Updated**: 2025-11-04
**Purpose**: Navigate all project documentation organized by purpose and audience

---

## 📖 Quick Links

- **Getting Started**: [README.md](README.md)
- **User Documentation**: [docs/](docs/)
- **Test Documentation**: [tests/glob/](tests/glob/)
- **Contributing**: [CONTRIBUTING.md](CONTRIBUTING.md)
- **Development**: [CLAUDE.md](CLAUDE.md)

---

## 📚 Documentation Structure

### Root Level

| Document | Purpose | Audience |
|----------|---------|----------|
| [README.md](README.md) | Project overview and quick start | Everyone |
| [CONTRIBUTING.md](CONTRIBUTING.md) | Contribution guidelines | Contributors |
| [CLAUDE.md](CLAUDE.md) | Development guidelines for AI-assisted development | Developers |
| [JSON_REFERENCE.md](JSON_REFERENCE.md) | JSON file schema reference | Developers |

### User Documentation (`docs/`)

| Document | Purpose | Language |
|----------|---------|----------|
| [docs/README.md](docs/README.md) | Documentation overview | English |
| [docs/PACKAGE_VERSION_MANAGEMENT.md](docs/PACKAGE_VERSION_MANAGEMENT.md) | Package version management guide | English |
| [docs/SECURITY_ENHANCED_INSTALLATION.md](docs/SECURITY_ENHANCED_INSTALLATION.md) | Security features for URL installation | English |
| [docs/en/cm-cli.md](docs/en/cm-cli.md) | CLI usage guide | English |
| [docs/en/use_aria2.md](docs/en/use_aria2.md) | Aria2 download configuration | English |
| [docs/ko/cm-cli.md](docs/ko/cm-cli.md) | CLI usage guide | Korean |

### Package Documentation

| Package | Document | Purpose |
|---------|----------|---------|
| comfyui_manager | [comfyui_manager/README.md](comfyui_manager/README.md) | Package overview |
| common | [comfyui_manager/common/README.md](comfyui_manager/common/README.md) | Common utilities documentation |
| data_models | [comfyui_manager/data_models/README.md](comfyui_manager/data_models/README.md) | Data model generation guide |
| glob | [comfyui_manager/glob/CLAUDE.md](comfyui_manager/glob/CLAUDE.md) | Glob module development guide |
| js | [comfyui_manager/js/README.md](comfyui_manager/js/README.md) | JavaScript components |

### Test Documentation (`tests/`)

| Document | Purpose | Status |
|----------|---------|--------|
| [tests/TEST.md](tests/TEST.md) | Testing overview | ✅ |
| [tests/glob/README.md](tests/glob/README.md) | Glob API endpoint tests | ✅ Translated |
| [tests/glob/TESTING_GUIDE.md](tests/glob/TESTING_GUIDE.md) | Test execution guide | ✅ |
| [tests/glob/TEST_INDEX.md](tests/glob/TEST_INDEX.md) | Test documentation unified index | ✅ Translated |
| [tests/glob/TEST_LOG.md](tests/glob/TEST_LOG.md) | Test execution log | ✅ Translated |

### Node Database

| Document | Purpose |
|----------|---------|
| [node_db/README.md](node_db/README.md) | Node database information |

---

## 🔒 Internal Documentation (`docs/internal/`)

### CLI Migration (`docs/internal/cli_migration/`)

Historical documentation for the CLI migration from the legacy module to the glob module (completed).

| Document | Purpose |
|----------|---------|
| [README.md](docs/internal/cli_migration/README.md) | Migration plan overview |
| [CLI_COMPATIBILITY_ANALYSIS.md](docs/internal/cli_migration/CLI_COMPATIBILITY_ANALYSIS.md) | Legacy vs Glob compatibility analysis |
| [CLI_IMPLEMENTATION_CONTEXT.md](docs/internal/cli_migration/CLI_IMPLEMENTATION_CONTEXT.md) | Implementation context |
| [CLI_IMPLEMENTATION_TODO.md](docs/internal/cli_migration/CLI_IMPLEMENTATION_TODO.md) | Implementation checklist |
| [CLI_PURE_GLOB_MIGRATION_PLAN.md](docs/internal/cli_migration/CLI_PURE_GLOB_MIGRATION_PLAN.md) | Technical migration specification |
| [CLI_GLOB_API_REFERENCE.md](docs/internal/cli_migration/CLI_GLOB_API_REFERENCE.md) | Glob API reference |
| [CLI_IMPLEMENTATION_CONSTRAINTS.md](docs/internal/cli_migration/CLI_IMPLEMENTATION_CONSTRAINTS.md) | Migration constraints |
| [CLI_TESTING_CHECKLIST.md](docs/internal/cli_migration/CLI_TESTING_CHECKLIST.md) | Testing checklist |
| [CLI_SHOW_LIST_REVISION.md](docs/internal/cli_migration/CLI_SHOW_LIST_REVISION.md) | show_list implementation plan |

### Test Planning (`docs/internal/test_planning/`)

Internal test planning documents (in Korean).

| Document | Purpose | Language |
|----------|---------|----------|
| [TEST_PLAN_ADDITIONAL.md](docs/internal/test_planning/TEST_PLAN_ADDITIONAL.md) | Additional test scenarios | Korean |
| [COMPLEX_SCENARIOS_TEST_PLAN.md](docs/internal/test_planning/COMPLEX_SCENARIOS_TEST_PLAN.md) | Complex multi-version test scenarios | Korean |

---

## 📋 Documentation by Audience

### For Users
1. [README.md](README.md) - Start here
2. [docs/en/cm-cli.md](docs/en/cm-cli.md) - CLI usage
3. [docs/PACKAGE_VERSION_MANAGEMENT.md](docs/PACKAGE_VERSION_MANAGEMENT.md) - Version management

### For Contributors
1. [CONTRIBUTING.md](CONTRIBUTING.md) - Contribution process
2. [CLAUDE.md](CLAUDE.md) - Development guidelines
3. [comfyui_manager/data_models/README.md](comfyui_manager/data_models/README.md) - Data model workflow

### For Developers
1. [CLAUDE.md](CLAUDE.md) - Development workflow
2. [comfyui_manager/glob/CLAUDE.md](comfyui_manager/glob/CLAUDE.md) - Glob module guide
3. [JSON_REFERENCE.md](JSON_REFERENCE.md) - Schema reference
4. [docs/PACKAGE_VERSION_MANAGEMENT.md](docs/PACKAGE_VERSION_MANAGEMENT.md) - Package management internals

### For Testers
1. [tests/TEST.md](tests/TEST.md) - Testing overview
2. [tests/glob/TEST_INDEX.md](tests/glob/TEST_INDEX.md) - Test documentation index
3. [tests/glob/TESTING_GUIDE.md](tests/glob/TESTING_GUIDE.md) - Test execution guide

---

## 🔄 Documentation Maintenance

### When to Update
- **README.md**: Project structure or main features change
- **CLAUDE.md**: Development workflow changes
- **Test Documentation**: New tests added or test structure changes
- **User Documentation**: User-facing features change
- **This Index**: New documentation added or reorganized

### Documentation Standards
- Use clear, descriptive titles
- Include a "Last Updated" date
- Specify the target audience
- Provide examples where applicable
- Keep language simple and accessible
- Translate user-facing docs to Korean when possible

---

## 🗂️ File Organization

```
comfyui-manager/
├── DOCUMENTATION_INDEX.md (this file)
├── README.md
├── CONTRIBUTING.md
├── CLAUDE.md
├── JSON_REFERENCE.md
├── docs/
│   ├── README.md
│   ├── PACKAGE_VERSION_MANAGEMENT.md
│   ├── SECURITY_ENHANCED_INSTALLATION.md
│   ├── en/
│   │   ├── cm-cli.md
│   │   └── use_aria2.md
│   ├── ko/
│   │   └── cm-cli.md
│   └── internal/
│       ├── cli_migration/ (9 files - completed migration docs)
│       └── test_planning/ (2 files - Korean test plans)
├── comfyui_manager/
│   ├── README.md
│   ├── common/README.md
│   ├── data_models/README.md
│   ├── glob/CLAUDE.md
│   └── js/README.md
├── tests/
│   ├── TEST.md
│   └── glob/
│       ├── README.md
│       ├── TESTING_GUIDE.md
│       ├── TEST_INDEX.md
│       └── TEST_LOG.md
└── node_db/
    └── README.md
```

---

**Total Documentation Files**: 36 files organized across 6 categories

**Translation Status**:
- ✅ Core user documentation: English
- ✅ CLI guide: English + Korean
- ✅ Test documentation: English (translated from Korean)
- 📝 Internal planning docs: Korean (preserved as-is for historical reference)
`MANIFEST.in` (14 lines, deleted)

@@ -1,14 +0,0 @@

```
include comfyui_manager/js/*
include comfyui_manager/*.json
include comfyui_manager/glob/*
include LICENSE.txt
include README.md
include requirements.txt
include pyproject.toml
include custom-node-list.json
include extension-node-list.json
include extras.json
include github-stats.json
include model-list.json
include alter-list.json
include comfyui_manager/channels.list.template
```
`README.md` (162 lines changed)

````diff
@@ -1,39 +1,100 @@
-# ComfyUI Manager
+# ComfyUI Manager (V3.0)
+
+## Introducing the New ComfyUI Manager (V4.0)
+
+This branch is a temporary branch maintained for users of the older ComfyUI. It will be kept for a limited time and then replaced by the [manager-v4](https://github.com/Comfy-Org/ComfyUI-Manager/tree/manager-v4) branch. (This branch will be renamed to the `manager-v3` branch.)
+
+Previously, **ComfyUI Manager** functioned as a somewhat independent extension of ComfyUI, requiring users to install it via `git clone`. This branch will continue to exist for a while to ensure that using `git clone` with older versions of ComfyUI does not cause problems.
+
+The new **ComfyUI Manager** is now managed as an optional dependency of ComfyUI. This means that if you are using the new ComfyUI, you no longer need to visit this repository to use **ComfyUI Manager**.
+
+**Notes:**
+
+* **ComfyUI Manager** is now available as a package on PyPI: [https://pypi.org/project/comfyui-manager](https://pypi.org/project/comfyui-manager)
+* Even if the **ComfyUI Manager** dependency is installed, you must enable it by adding the `--enable-manager` option when running ComfyUI.
+* Once the new **ComfyUI Manager** is enabled, any copy of **comfyui-manager** installed under `ComfyUI/custom_nodes` will be disabled.
+* Please make all future contributions for feature improvements and bug fixes to the manager-v4 branch.
+* For now, custom node registration will continue in this branch as well, but it will eventually be fully replaced by registration through https://registry.comfy.org via `pyproject.toml` ([guide](https://docs.comfy.org/registry/overview)).
+
+---
 
 **ComfyUI-Manager** is an extension designed to enhance the usability of [ComfyUI](https://github.com/comfyanonymous/ComfyUI). It offers management functions to **install, remove, disable, and enable** various custom nodes of ComfyUI. Furthermore, this extension provides a hub feature and convenience functions to access a wide range of information within ComfyUI.
 
 ![menu](docs/images/dialog.jpg)
 
-## NOTICE
-* V4.0: Modify the structure to be installable via pip instead of using git clone.
-* V3.16: Support for `uv` has been added. Set `use_uv` in `config.ini`.
-* V3.10: `double-click feature` is removed
-  * This feature has been moved to https://github.com/ltdrdata/comfyui-connection-helper
-* V3.3.2: Overhauled. Officially supports [https://registry.comfy.org/](https://registry.comfy.org/).
-* You can see whole nodes info on [ComfyUI Nodes Info](https://ltdrdata.github.io/) page.
-
 ## Installation
 
-* When installing the latest ComfyUI, it will be automatically installed as a dependency, so manual installation is no longer necessary.
+### Installation[method1] (General installation method: ComfyUI-Manager only)
 
-* Manual installation of the nightly version:
-  * Clone to a temporary directory (**Note:** Do **not** clone into `ComfyUI/custom_nodes`.)
-    ```
-    git clone https://github.com/Comfy-Org/ComfyUI-Manager
-    ```
-  * Install via pip
-    ```
-    cd ComfyUI-Manager
-    pip install .
-    ```
+To install ComfyUI-Manager in addition to an existing installation of ComfyUI, you can follow the following steps:
+
+1. goto `ComfyUI/custom_nodes` dir in terminal(cmd)
+2. `git clone https://github.com/ltdrdata/ComfyUI-Manager comfyui-manager`
+3. Restart ComfyUI
+
+
+### Installation[method2] (Installation for portable ComfyUI version: ComfyUI-Manager only)
+
+1. install git
+   - https://git-scm.com/download/win
+   - standalone version
+   - select option: use windows default console window
+2. Download [scripts/install-manager-for-portable-version.bat](https://github.com/ltdrdata/ComfyUI-Manager/raw/main/scripts/install-manager-for-portable-version.bat) into installed `"ComfyUI_windows_portable"` directory
+   - Don't click. Right click the link and use save as...
+3. double click `install-manager-for-portable-version.bat` batch file
+
+![portable-install](docs/images/portable-install.jpg)
+
+
+### Installation[method3] (Installation through comfy-cli: install ComfyUI and ComfyUI-Manager at once.)
+
+> RECOMMENDED: comfy-cli provides various features to manage ComfyUI from the CLI.
+
+* **prerequisite: python 3, git**
+
+Windows:
+```commandline
+python -m venv venv
+venv\Scripts\activate
+pip install comfy-cli
+comfy install
+```
+
+Linux/OSX:
+```commandline
+python -m venv venv
+. venv/bin/activate
+pip install comfy-cli
+comfy install
+```
 
 * See also: https://github.com/Comfy-Org/comfy-cli
 
-## Front-end
+### Installation[method4] (Installation for linux+venv: ComfyUI + ComfyUI-Manager)
 
-* The built-in front-end of ComfyUI-Manager is the legacy front-end. The front-end for ComfyUI-Manager is now provided via [ComfyUI Frontend](https://github.com/Comfy-Org/ComfyUI_frontend).
-* To enable the legacy front-end, set the environment variable `ENABLE_LEGACY_COMFYUI_MANAGER_FRONT` to `true` before running.
+To install ComfyUI with ComfyUI-Manager on Linux using a venv environment, you can follow these steps:
+
+* **prerequisite: python-is-python3, python3-venv, git**
+
+1. Download [scripts/install-comfyui-venv-linux.sh](https://github.com/ltdrdata/ComfyUI-Manager/raw/main/scripts/install-comfyui-venv-linux.sh) into empty install directory
+   - Don't click. Right click the link and use save as...
+   - ComfyUI will be installed in the subdirectory of the specified directory, and the directory will contain the generated executable script.
+2. `chmod +x install-comfyui-venv-linux.sh`
+3. `./install-comfyui-venv-linux.sh`
+
+### Installation Precautions
+
+* **DO**: `ComfyUI-Manager` files must be accurately located in the path `ComfyUI/custom_nodes/comfyui-manager`
+  * Installing in a compressed file format is not recommended.
+* **DON'T**: Decompress directly into the `ComfyUI/custom_nodes` location, resulting in the Manager contents like `__init__.py` being placed directly in that directory.
+  * You have to remove all ComfyUI-Manager files from `ComfyUI/custom_nodes`
+* **DON'T**: In a form where decompression occurs in a path such as `ComfyUI/custom_nodes/ComfyUI-Manager/ComfyUI-Manager`.
+* **DON'T**: In a form where decompression occurs in a path such as `ComfyUI/custom_nodes/ComfyUI-Manager-main`.
+  * In such cases, `ComfyUI-Manager` may operate, but it won't be recognized within `ComfyUI-Manager`, and updates cannot be performed. It also poses the risk of duplicate installations. Remove it and install properly via the `git clone` method.
+
+You can execute ComfyUI by running either `./run_gpu.sh` or `./run_cpu.sh` depending on your system configuration.
+
+## Colab Notebook
+
+This repository provides Colab notebooks that allow you to install and use ComfyUI, including ComfyUI-Manager. To use ComfyUI, [click on this link](https://colab.research.google.com/github/ltdrdata/ComfyUI-Manager/blob/main/notebooks/comfyui_colab_with_manager.ipynb).
+* Support for installing ComfyUI
+* Support for basic installation of ComfyUI-Manager
+* Support for automatically installing dependencies of custom nodes upon restarting Colab notebooks.
 
 ## How To Use
@@ -215,14 +276,13 @@ The following settings are applied based on the section marked as `is_default`.
 downgrade_blacklist = <Set a list of packages to prevent downgrades. List them separated by commas.>
 security_level = <Set the security level => strong|normal|normal-|weak>
 always_lazy_install = <Whether to perform dependency installation on restart even in environments other than Windows.>
-network_mode = <Set the network mode => public|private|offline|personal_cloud>
+network_mode = <Set the network mode => public|private|offline>
 ```
 
 * network_mode:
   - public: An environment that uses a typical public network.
   - private: An environment that uses a closed network, where a private node DB is configured via `channel_url`. (Uses cache if available)
   - offline: An environment that does not use any external connections when using an offline network. (Uses cache if available)
-  - personal_cloud: Applies relaxed security features in cloud environments such as Google Colab or Runpod, where strong security is not required.
 
 
 ## Additional Feature
@@ -313,33 +373,31 @@ When you run the `scan.sh` script:
 
 
 ## Security policy
-The security settings are applied based on whether the ComfyUI server's listener is non-local and whether the network mode is set to `personal_cloud`.
-
-* **non-local**: When the server is launched with `--listen` and is bound to a network range other than the local `127.` range, allowing remote IP access.
-* **personal\_cloud**: When the `network_mode` is set to `personal_cloud`.
-
-### Risky Level Table
-
-| Risky Level | features |
-|-------------|----------|
-| high+ | * `Install via git url`, `pip install`<BR>* Installation of nodepack registered not in the `default channel`. |
-| high | * Fix nodepack |
-| middle+ | * Uninstall/Update<BR>* Installation of nodepack registered in the `default channel`.<BR>* Restore/Remove Snapshot<BR>* Install model |
-| middle | * Restart |
-| low | * Update ComfyUI |
-
-### Security Level Table
-
-| Security Level | local | non-local (personal_cloud) | non-local (not personal_cloud) |
-|----------------|-------|----------------------------|--------------------------------|
-| strong | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed | * Only `weak` level risky features are allowed |
-| normal | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available |
-| normal- | * All features are available | * `high+` and `high` level risky features are not allowed<BR>* `middle+` and `middle` level risky features are available | * `high+`, `high` and `middle+` level risky features are not allowed<BR>* `middle` level risky features are available |
-| weak | * All features are available | * All features are available | * `high+` and `middle+` level risky features are not allowed<BR>* `high`, `middle` and `low` level risky features are available |
+* Edit `config.ini` file: add `security_level = <LEVEL>`
+* `strong`
+  * doesn't allow `high` and `middle` level risky feature
+* `normal`
+  * doesn't allow `high` level risky feature
+  * `middle` level risky feature is available
+* `normal-`
+  * doesn't allow `high` level risky feature if `--listen` is specified and not starts with `127.`
+  * `middle` level risky feature is available
+* `weak`
+  * all feature is available
+
+* `high` level risky features
+  * `Install via git url`, `pip install`
+  * Installation of custom nodes registered not in the `default channel`.
+  * Fix custom nodes
+* `middle` level risky features
+  * Uninstall/Update
  * Installation of custom nodes registered in the `default channel`.
+  * Restore/Remove Snapshot
+  * Restart
+* `low` level risky features
+  * Update ComfyUI
 
 
 # Disclaimer
````
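As a side note on the `config.ini` keys shown in the hunks above, here is a small illustrative snippet (not from the repository) that reads `security_level` and `network_mode` with the standard-library `configparser`; the `[default]` section name is an assumption borrowed from the cm-cli code later in this compare:

```python
# Illustrative only: read ComfyUI-Manager settings from config.ini.
# Assumes a [default] section, as read_downgrade_blacklist() does below.
import configparser

config = configparser.ConfigParser(strict=False)
config.read("config.ini")
default_conf = config["default"] if "default" in config else {}

security_level = default_conf.get("security_level", "normal")
network_mode = default_conf.get("network_mode", "public")
print(f"security_level={security_level}, network_mode={network_mode}")
```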
`__init__.py` (25 lines, new file)

@@ -0,0 +1,25 @@

```python
"""
This file is the entry point for the ComfyUI-Manager package, handling CLI-only mode and initial setup.
"""

import os
import sys

cli_mode_flag = os.path.join(os.path.dirname(__file__), '.enable-cli-only-mode')

if not os.path.exists(cli_mode_flag):
    sys.path.append(os.path.join(os.path.dirname(__file__), "glob"))
    import manager_server  # noqa: F401
    import share_3rdparty  # noqa: F401
    import cm_global

    if not cm_global.disable_front and not 'DISABLE_COMFYUI_MANAGER_FRONT' in os.environ:
        WEB_DIRECTORY = "js"
else:
    print("\n[ComfyUI-Manager] !! cli-only-mode is enabled !!\n")

NODE_CLASS_MAPPINGS = {}
__all__ = ['NODE_CLASS_MAPPINGS']
```
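As the docstring above notes, CLI-only mode is toggled by a flag file next to the package's `__init__.py`. A hedged illustration of creating that flag; the install location in the path is an assumption, not taken from this diff:

```python
# Illustrative only: create the flag file that puts ComfyUI-Manager into
# cli-only mode (so no web extension is registered at startup).
import pathlib

pkg_dir = pathlib.Path("ComfyUI/custom_nodes/comfyui-manager")  # assumed install location
(pkg_dir / ".enable-cli-only-mode").touch()
```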
`channels.list.template` (6 lines, new file)

@@ -0,0 +1,6 @@

```
default::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main
recent::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/new
legacy::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/legacy
forked::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/forked
dev::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/dev
tutorial::https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/node_db/tutorial
```
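Each line of the template pairs a channel name with a base URL, separated by `::`. A minimal parsing sketch for that format (illustrative; not necessarily how the manager itself loads the file):

```python
# Illustrative only: parse "name::url" entries from a channels.list file.
def parse_channels(path: str = "channels.list") -> dict[str, str]:
    channels = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line or "::" not in line:
                continue  # skip blanks and malformed entries
            name, url = line.split("::", 1)
            channels[name] = url
    return channels
```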
Remaining hunks modify the CLI entry module (the comments reference `cm-cli.py`); the last hunk is cut off in the captured page and ends mid-function:

```diff
@@ -15,31 +15,31 @@ import git
 import importlib
 
 
-from ..common import manager_util
+sys.path.append(os.path.dirname(__file__))
+sys.path.append(os.path.join(os.path.dirname(__file__), "glob"))
+
+import manager_util
 
 # read env vars
 # COMFYUI_FOLDERS_BASE_PATH is not required in cm-cli.py
 # `comfy_path` should be resolved before importing manager_core
 
 comfy_path = os.environ.get('COMFYUI_PATH')
 
 if comfy_path is None:
-    print("[bold red]cm-cli: environment variable 'COMFYUI_PATH' is not specified.[/bold red]")
-    exit(-1)
+    try:
+        import folder_paths
+        comfy_path = os.path.join(os.path.dirname(folder_paths.__file__))
+    except:
+        print("\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr)
+        comfy_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..', '..'))
 
+# This should be placed here
 sys.path.append(comfy_path)
 
-if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):
-    print("[bold red]cm-cli: '{comfy_path}' is not a valid 'COMFYUI_PATH' location.[/bold red]")
-    exit(-1)
-
 
 import utils.extra_config
-from ..common import cm_global
-from ..glob import manager_core as core
-from ..common import context
-from ..glob.manager_core import unified_manager
-from ..common import cnr_utils
+import cm_global
+import manager_core as core
+from manager_core import unified_manager
+import cnr_utils
 
 comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
 
@@ -66,7 +66,7 @@ def check_comfyui_hash():
         repo = git.Repo(comfy_path)
         core.comfy_ui_revision = len(list(repo.iter_commits('HEAD')))
         core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime
-    except Exception:
+    except:
         print('[bold yellow]INFO: Frozen ComfyUI mode.[/bold yellow]')
         core.comfy_ui_revision = 0
         core.comfy_ui_commit_datetime = 0
@@ -82,7 +82,7 @@ def read_downgrade_blacklist():
     try:
         import configparser
         config = configparser.ConfigParser(strict=False)
-        config.read(context.manager_config_path)
+        config.read(core.manager_config.path)
         default_conf = config['default']
 
         if 'downgrade_blacklist' in default_conf:
@@ -90,7 +90,7 @@ def read_downgrade_blacklist():
             items = [x.strip() for x in items if x != '']
             cm_global.pip_downgrade_blacklist += items
             cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
-    except Exception:
+    except:
         pass
 
 
@@ -105,7 +105,7 @@ class Ctx:
         self.no_deps = False
         self.mode = 'cache'
         self.user_directory = None
-        self.custom_nodes_paths = [os.path.join(context.comfy_base_path, 'custom_nodes')]
+        self.custom_nodes_paths = [os.path.join(core.comfy_base_path, 'custom_nodes')]
         self.manager_files_directory = os.path.dirname(__file__)
 
         if Ctx.folder_paths is None:
@@ -129,7 +129,8 @@ class Ctx:
         if channel is not None:
             self.channel = channel
 
-        unified_manager.reload()
+        asyncio.run(unified_manager.reload(cache_mode=self.mode, dont_wait=False))
+        asyncio.run(unified_manager.load_nightly(self.channel, self.mode))
 
     def set_no_deps(self, no_deps):
         self.no_deps = no_deps
@@ -142,14 +143,14 @@ class Ctx:
         if os.path.exists(extra_model_paths_yaml):
             utils.extra_config.load_extra_path_config(extra_model_paths_yaml)
 
-        context.update_user_directory(user_directory)
+        core.update_user_directory(user_directory)
 
-        if os.path.exists(context.manager_pip_overrides_path):
-            with open(context.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
+        if os.path.exists(core.manager_pip_overrides_path):
+            with open(core.manager_pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
                 cm_global.pip_overrides = json.load(json_file)
 
-        if os.path.exists(context.manager_pip_blacklist_path):
-            with open(context.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
+        if os.path.exists(core.manager_pip_blacklist_path):
+            with open(core.manager_pip_blacklist_path, 'r', encoding="UTF-8", errors="ignore") as f:
                 for x in f.readlines():
                     y = x.strip()
                     if y != '':
@@ -162,15 +163,15 @@ class Ctx:
 
     @staticmethod
     def get_startup_scripts_path():
-        return os.path.join(context.manager_startup_script_path, "install-scripts.txt")
+        return os.path.join(core.manager_startup_script_path, "install-scripts.txt")
 
     @staticmethod
     def get_restore_snapshot_path():
-        return os.path.join(context.manager_startup_script_path, "restore-snapshot.json")
+        return os.path.join(core.manager_startup_script_path, "restore-snapshot.json")
 
     @staticmethod
     def get_snapshot_path():
-        return context.manager_snapshot_path
+        return core.manager_snapshot_path
 
     @staticmethod
     def get_custom_nodes_paths():
@@ -187,14 +188,9 @@ def install_node(node_spec_str, is_all=False, cnt_msg='', **kwargs):
     exit_on_fail = kwargs.get('exit_on_fail', False)
     print(f"install_node exit on fail:{exit_on_fail}...")
 
-    if unified_manager.is_url_like(node_spec_str):
-        # install via git URLs
-        repo_name = os.path.basename(node_spec_str)
-        if repo_name.endswith('.git'):
-            repo_name = repo_name[:-4]
-        res = asyncio.run(unified_manager.repo_install(
-            node_spec_str, repo_name, instant_execution=True, no_deps=cmd_ctx.no_deps
-        ))
+    if core.is_valid_url(node_spec_str):
+        # install via urls
+        res = asyncio.run(core.gitclone_install(node_spec_str, no_deps=cmd_ctx.no_deps))
         if not res.result:
             print(res.msg)
             print(f"[bold red]ERROR: An error occurred while installing '{node_spec_str}'.[/bold red]")
@@ -228,7 +224,7 @@ def install_node(node_spec_str, is_all=False, cnt_msg='', **kwargs):
         print(f"{cnt_msg} [INSTALLED] {node_name:50}[{res.target}]")
     elif res.action == 'switch-cnr' and res.result:
         print(f"{cnt_msg} [INSTALLED] {node_name:50}[{res.target}]")
-    elif (res.action == 'switch-cnr' or res.action == 'install-cnr') and not res.result and cnr_utils.get_nodepack(node_name):
+    elif (res.action == 'switch-cnr' or res.action == 'install-cnr') and not res.result and node_name in unified_manager.cnr_map:
         print(f"\nAvailable version of '{node_name}'")
         show_versions(node_name)
         print("")
@@ -319,10 +315,10 @@ def update_parallel(nodes):
     if 'all' in nodes:
         is_all = True
         nodes = []
-        for packages in unified_manager.installed_node_packages.values():
-            for pack in packages:
-                if pack.is_enabled:
-                    nodes.append(pack.id)
+        for x in unified_manager.active_nodes.keys():
+            nodes.append(x)
+        for x in unified_manager.unknown_active_nodes.keys():
+            nodes.append(x+"@unknown")
     else:
         nodes = [x for x in nodes if x.lower() not in ['comfy', 'comfyui']]
 
@@ -420,60 +416,109 @@ def disable_node(node_spec_str: str, is_all=False, cnt_msg=''):
 
 
 def show_list(kind, simple=False):
-    """
-    Show installed nodepacks only with on-demand metadata retrieval
-    Supported kinds: 'installed', 'enabled', 'disabled'
-    """
-    # Validate supported commands
-    if kind not in ['installed', 'enabled', 'disabled']:
-        print(f"[bold red]Unsupported: 'show {kind}'. Available options: installed/enabled/disabled[/bold red]")
-        print("Note: 'show all', 'show not-installed', and 'show cnr' are no longer supported.")
-        print("Use 'show installed' to see all installed packages.")
-        return
-
-    # Get all installed packages from glob unified_manager
-    all_packages = []
-    for packages in unified_manager.installed_node_packages.values():
-        all_packages.extend(packages)
-
-    # Filter by status
-    if kind == 'enabled':
-        packages = [pkg for pkg in all_packages if pkg.is_enabled]
-    elif kind == 'disabled':
-        packages = [pkg for pkg in all_packages if pkg.is_disabled]
-    else:  # 'installed'
-        packages = all_packages
-
-    # Display packages
-    for package in sorted(packages, key=lambda x: x.id):
-        # Basic info from InstalledNodePackage
-        status = "[ ENABLED ]" if package.is_enabled else "[ DISABLED ]"
-
-        # Enhanced info with on-demand CNR retrieval
-        display_name = package.id
-        author = "Unknown"
-        version = package.version
-
-        # Try to get additional info from CNR for better display
-        if package.is_from_cnr:
-            try:
-                cnr_info = cnr_utils.get_nodepack(package.id)
-                if cnr_info:
-                    display_name = cnr_info.get('name', package.id)
-                    if 'publisher' in cnr_info and 'name' in cnr_info['publisher']:
-                        author = cnr_info['publisher']['name']
-            except Exception:
-                # Fallback to basic info if CNR lookup fails
-                pass
-        elif package.is_nightly:
-            version = "nightly"
-        elif package.is_unknown:
-            version = "unknown"
-
-        if simple:
-            print(f"{display_name}@{version}")
-        else:
-            print(f"{status} {display_name:50} {package.id:30} (author: {author:20}) [{version}]")
+    custom_nodes = asyncio.run(unified_manager.get_custom_nodes(channel=cmd_ctx.channel, mode=cmd_ctx.mode))
+
+    # collect not-installed unknown nodes
+    not_installed_unknown_nodes = []
+    repo_unknown = {}
+
+    for k, v in custom_nodes.items():
+        if 'cnr_latest' not in v:
+            if len(v['files']) == 1:
+                repo_url = v['files'][0]
+                node_name = repo_url.split('/')[-1]
+                if node_name not in unified_manager.unknown_inactive_nodes and node_name not in unified_manager.unknown_active_nodes:
+                    not_installed_unknown_nodes.append(v)
+                else:
+                    repo_unknown[node_name] = v
+
+    processed = {}
+    unknown_processed = []
+
+    flag = kind in ['all', 'cnr', 'installed', 'enabled']
+    for k, v in unified_manager.active_nodes.items():
+        if flag:
+            cnr = unified_manager.cnr_map[k]
+            processed[k] = "[ ENABLED ] ", cnr['name'], k, cnr['publisher']['name'], v[0]
+        else:
+            processed[k] = None
+
+    if flag and kind != 'cnr':
+        for k, v in unified_manager.unknown_active_nodes.items():
+            item = repo_unknown.get(k)
+
+            if item is None:
+                continue
+
+            log_item = "[ ENABLED ] ", item['title'], k, item['author']
+            unknown_processed.append(log_item)
+
+    flag = kind in ['all', 'cnr', 'installed', 'disabled']
+    for k, v in unified_manager.cnr_inactive_nodes.items():
+        if k in processed:
+            continue
+
+        if flag:
+            cnr = unified_manager.cnr_map[k]
+            processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], ", ".join(list(v.keys()))
+        else:
+            processed[k] = None
+
+    for k, v in unified_manager.nightly_inactive_nodes.items():
+        if k in processed:
+            continue
+
+        if flag:
+            cnr = unified_manager.cnr_map[k]
+            processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], 'nightly'
+        else:
+            processed[k] = None
+
+    if flag and kind != 'cnr':
+        for k, v in unified_manager.unknown_inactive_nodes.items():
+            item = repo_unknown.get(k)
+
+            if item is None:
+                continue
+
+            log_item = "[ DISABLED ] ", item['title'], k, item['author']
+            unknown_processed.append(log_item)
+
+    flag = kind in ['all', 'cnr', 'not-installed']
+    for k, v in unified_manager.cnr_map.items():
+        if k in processed:
+            continue
+
+        if flag:
+            cnr = unified_manager.cnr_map[k]
+            ver_spec = v['latest_version']['version'] if 'latest_version' in v else '0.0.0'
+            processed[k] = "[ NOT INSTALLED ] ", cnr['name'], k, cnr['publisher']['name'], ver_spec
+        else:
+            processed[k] = None
+
+    if flag and kind != 'cnr':
+        for x in not_installed_unknown_nodes:
+            if len(x['files']) == 1:
```
|
node_id = os.path.basename(x['files'][0])
|
||||||
|
log_item = "[ NOT INSTALLED ] ", x['title'], node_id, x['author']
|
||||||
|
unknown_processed.append(log_item)
|
||||||
|
|
||||||
|
for x in processed.values():
|
||||||
|
if x is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
prefix, title, short_id, author, ver_spec = x
|
||||||
|
if simple:
|
||||||
|
print(title+'@'+ver_spec)
|
||||||
|
else:
|
||||||
|
print(f"{prefix} {title:50} {short_id:30} (author: {author:20}) \\[{ver_spec}]")
|
||||||
|
|
||||||
|
for x in unknown_processed:
|
||||||
|
prefix, title, short_id, author = x
|
||||||
|
if simple:
|
||||||
|
print(title+'@unknown')
|
||||||
|
else:
|
||||||
|
print(f"{prefix} {title:50} {short_id:30} (author: {author:20}) [UNKNOWN]")
|
||||||
|
|
||||||
|
|
||||||
async def show_snapshot(simple_mode=False):
|
async def show_snapshot(simple_mode=False):
|
||||||
@@ -514,14 +559,37 @@ async def auto_save_snapshot():
|
|||||||
|
|
||||||
|
|
||||||
def get_all_installed_node_specs():
|
def get_all_installed_node_specs():
|
||||||
"""
|
|
||||||
Get all installed node specifications using glob InstalledNodePackage data structure
|
|
||||||
"""
|
|
||||||
res = []
|
res = []
|
||||||
for packages in unified_manager.installed_node_packages.values():
|
processed = set()
|
||||||
for pack in packages:
|
for k, v in unified_manager.active_nodes.items():
|
||||||
node_spec_str = f"{pack.id}@{pack.version}"
|
node_spec_str = f"{k}@{v[0]}"
|
||||||
|
res.append(node_spec_str)
|
||||||
|
processed.add(k)
|
||||||
|
|
||||||
|
for k in unified_manager.cnr_inactive_nodes.keys():
|
||||||
|
if k in processed:
|
||||||
|
continue
|
||||||
|
|
||||||
|
latest = unified_manager.get_from_cnr_inactive_nodes(k)
|
||||||
|
if latest is not None:
|
||||||
|
node_spec_str = f"{k}@{str(latest[0])}"
|
||||||
res.append(node_spec_str)
|
res.append(node_spec_str)
|
||||||
|
|
||||||
|
for k in unified_manager.nightly_inactive_nodes.keys():
|
||||||
|
if k in processed:
|
||||||
|
continue
|
||||||
|
|
||||||
|
node_spec_str = f"{k}@nightly"
|
||||||
|
res.append(node_spec_str)
|
||||||
|
|
||||||
|
for k in unified_manager.unknown_active_nodes.keys():
|
||||||
|
node_spec_str = f"{k}@unknown"
|
||||||
|
res.append(node_spec_str)
|
||||||
|
|
||||||
|
for k in unified_manager.unknown_inactive_nodes.keys():
|
||||||
|
node_spec_str = f"{k}@unknown"
|
||||||
|
res.append(node_spec_str)
|
||||||
|
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
@@ -590,7 +658,7 @@ def install(
|
|||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
cmd_ctx.set_no_deps(no_deps)
|
cmd_ctx.set_no_deps(no_deps)
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
|
for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
|
||||||
pip_fixer.fix_broken()
|
pip_fixer.fix_broken()
|
||||||
|
|
||||||
@@ -628,7 +696,7 @@ def reinstall(
|
|||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
cmd_ctx.set_no_deps(no_deps)
|
cmd_ctx.set_no_deps(no_deps)
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
for_each_nodes(nodes, act=reinstall_node)
|
for_each_nodes(nodes, act=reinstall_node)
|
||||||
pip_fixer.fix_broken()
|
pip_fixer.fix_broken()
|
||||||
|
|
||||||
@@ -682,7 +750,7 @@ def update(
|
|||||||
if 'all' in nodes:
|
if 'all' in nodes:
|
||||||
asyncio.run(auto_save_snapshot())
|
asyncio.run(auto_save_snapshot())
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
|
|
||||||
for x in nodes:
|
for x in nodes:
|
||||||
if x.lower() in ['comfyui', 'comfy', 'all']:
|
if x.lower() in ['comfyui', 'comfy', 'all']:
|
||||||
@@ -783,7 +851,7 @@ def fix(
|
|||||||
if 'all' in nodes:
|
if 'all' in nodes:
|
||||||
asyncio.run(auto_save_snapshot())
|
asyncio.run(auto_save_snapshot())
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
for_each_nodes(nodes, fix_node, allow_all=True)
|
for_each_nodes(nodes, fix_node, allow_all=True)
|
||||||
pip_fixer.fix_broken()
|
pip_fixer.fix_broken()
|
||||||
|
|
||||||
@@ -1060,7 +1128,7 @@ def restore_snapshot(
|
|||||||
print(f"[bold red]ERROR: `{snapshot_path}` is not exists.[/bold red]")
|
print(f"[bold red]ERROR: `{snapshot_path}` is not exists.[/bold red]")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
try:
|
try:
|
||||||
asyncio.run(core.restore_snapshot(snapshot_path, extras))
|
asyncio.run(core.restore_snapshot(snapshot_path, extras))
|
||||||
except Exception:
|
except Exception:
|
||||||
@@ -1092,7 +1160,7 @@ def restore_dependencies(
|
|||||||
total = len(node_paths)
|
total = len(node_paths)
|
||||||
i = 1
|
i = 1
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
for x in node_paths:
|
for x in node_paths:
|
||||||
print("----------------------------------------------------------------------------------------------------")
|
print("----------------------------------------------------------------------------------------------------")
|
||||||
print(f"Restoring [{i}/{total}]: {x}")
|
print(f"Restoring [{i}/{total}]: {x}")
|
||||||
@@ -1111,7 +1179,7 @@ def post_install(
|
|||||||
):
|
):
|
||||||
path = os.path.expanduser(path)
|
path = os.path.expanduser(path)
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
unified_manager.execute_install_script('', path, instant_execution=True)
|
unified_manager.execute_install_script('', path, instant_execution=True)
|
||||||
pip_fixer.fix_broken()
|
pip_fixer.fix_broken()
|
||||||
|
|
||||||
@@ -1151,11 +1219,11 @@ def install_deps(
|
|||||||
with open(deps, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
with open(deps, 'r', encoding="UTF-8", errors="ignore") as json_file:
|
||||||
try:
|
try:
|
||||||
json_obj = json.load(json_file)
|
json_obj = json.load(json_file)
|
||||||
except Exception:
|
except:
|
||||||
print(f"[bold red]Invalid json file: {deps}[/bold red]")
|
print(f"[bold red]Invalid json file: {deps}[/bold red]")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, core.manager_files_path)
|
||||||
for k in json_obj['custom_nodes'].keys():
|
for k in json_obj['custom_nodes'].keys():
|
||||||
state = core.simple_check_custom_node(k)
|
state = core.simple_check_custom_node(k)
|
||||||
if state == 'installed':
|
if state == 'installed':
|
||||||
@@ -1197,25 +1265,19 @@ def export_custom_node_ids(
|
|||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
|
|
||||||
with open(path, "w", encoding='utf-8') as output_file:
|
with open(path, "w", encoding='utf-8') as output_file:
|
||||||
# Export CNR package IDs using cnr_utils
|
for x in unified_manager.cnr_map.keys():
|
||||||
try:
|
print(x, file=output_file)
|
||||||
all_cnr = cnr_utils.get_all_nodepackages()
|
|
||||||
for package_id in all_cnr.keys():
|
|
||||||
print(package_id, file=output_file)
|
|
||||||
except Exception:
|
|
||||||
# If CNR lookup fails, continue with installed packages
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Export installed packages that are not from CNR
|
custom_nodes = asyncio.run(unified_manager.get_custom_nodes(channel=cmd_ctx.channel, mode=cmd_ctx.mode))
|
||||||
for packages in unified_manager.installed_node_packages.values():
|
for x in custom_nodes.values():
|
||||||
for pack in packages:
|
if 'cnr_latest' not in x:
|
||||||
if pack.is_unknown or pack.is_nightly:
|
if len(x['files']) == 1:
|
||||||
version_suffix = "@unknown" if pack.is_unknown else "@nightly"
|
repo_url = x['files'][0]
|
||||||
print(f"{pack.id}{version_suffix}", file=output_file)
|
node_id = repo_url.split('/')[-1]
|
||||||
|
print(f"{node_id}@unknown", file=output_file)
|
||||||
|
|
||||||
|
if 'id' in x:
|
||||||
def main():
|
print(f"{x['id']}@unknown", file=output_file)
|
||||||
app()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
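The CLI diff above passes node specs around as `name@version` strings (including `@nightly` and `@unknown` suffixes). A minimal, hypothetical parser for that format, assuming an unversioned spec defaults to `latest`; this is not the project's implementation:

```python
# Hypothetical helper, not taken from the repository: split a "name@version"
# node spec as used throughout the CLI diff above.
def parse_node_spec(spec: str) -> tuple[str, str]:
    name, sep, version = spec.partition("@")
    # Defaulting to "latest" is an assumption; the CLI may resolve it differently.
    return (name, version if sep else "latest")

print(parse_node_spec("example-pack@nightly"))  # ('example-pack', 'nightly')
```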
@@ -1,104 +0,0 @@
import os
import logging
from aiohttp import web
from .common.manager_security import HANDLER_POLICY
from .common import manager_security
from comfy.cli_args import args


def prestartup():
    from . import prestartup_script  # noqa: F401
    logging.info('[PRE] ComfyUI-Manager')


def start():
    logging.info('[START] ComfyUI-Manager')
    from .common import cm_global  # noqa: F401

    if args.enable_manager:
        if args.enable_manager_legacy_ui:
            try:
                from .legacy import manager_server  # noqa: F401
                from .legacy import share_3rdparty  # noqa: F401
                from .legacy import manager_core as core
                import nodes

                logging.info("[ComfyUI-Manager] Legacy UI is enabled.")
                nodes.EXTENSION_WEB_DIRS['comfyui-manager-legacy'] = os.path.join(os.path.dirname(__file__), 'js')
            except Exception as e:
                print("Error enabling legacy ComfyUI Manager frontend:", e)
                core = None
        else:
            from .glob import manager_server  # noqa: F401
            from .glob import share_3rdparty  # noqa: F401
            from .glob import manager_core as core

        if core is not None:
            manager_security.is_personal_cloud_mode = core.get_config()['network_mode'].lower() == 'personal_cloud'


def should_be_disabled(fullpath:str) -> bool:
    """
    1. Disables the legacy ComfyUI-Manager.
    2. The blocklist can be expanded later based on policies.
    """
    if args.enable_manager:
        # In cases where installation is done via a zip archive, the directory name may not be comfyui-manager, and it may not contain a git repository.
        # It is assumed that any installed legacy ComfyUI-Manager will have at least 'comfyui-manager' in its directory name.
        dir_name = os.path.basename(fullpath).lower()
        if 'comfyui-manager' in dir_name:
            return True

    return False


def get_client_ip(request):
    peername = request.transport.get_extra_info("peername")
    if peername is not None:
        host, port = peername
        return host

    return "unknown"


def create_middleware():
    connected_clients = set()
    is_local_mode = manager_security.is_loopback(args.listen)

    @web.middleware
    async def manager_middleware(request: web.Request, handler):
        nonlocal connected_clients

        # security policy for remote environments
        prev_client_count = len(connected_clients)
        client_ip = get_client_ip(request)
        connected_clients.add(client_ip)
        next_client_count = len(connected_clients)

        if prev_client_count == 1 and next_client_count > 1:
            manager_security.multiple_remote_alert()

        policy = manager_security.get_handler_policy(handler)
        is_banned = False

        # policy check
        if len(connected_clients) > 1:
            if is_local_mode:
                if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NON_LOCAL in policy:
                    is_banned = True
            if HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD in policy:
                is_banned = not manager_security.is_personal_cloud_mode

        if HANDLER_POLICY.BANNED in policy:
            is_banned = True

        if is_banned:
            logging.warning(f"[Manager] Banning request from {client_ip}: {request.path}")
            response = web.Response(text="[Manager] This request is banned.", status=403)
        else:
            response: web.Response = await handler(request)

        return response

    return manager_middleware
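For orientation, a hedged sketch of how a middleware factory like `create_middleware()` above could be attached to an aiohttp application; the app builder and route names here are illustrative and not part of ComfyUI-Manager:

```python
from aiohttp import web

async def ping(request):                      # illustrative handler only
    return web.Response(text="ok")

def build_app(manager_middleware):
    # aiohttp applies middlewares to every request handled by this application
    app = web.Application(middlewares=[manager_middleware])
    app.router.add_get("/ping", ping)
    return app

# app = build_app(create_middleware())
# web.run_app(app)
```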
@@ -1,6 +0,0 @@
default::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main
recent::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/new
legacy::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/legacy
forked::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/forked
dev::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/dev
tutorial::https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main/node_db/tutorial
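The removed `channels.list` entries above use a simple `name::url` format. A minimal parsing sketch, assumed for illustration rather than taken from the project's loader:

```python
def parse_channels(text: str) -> dict:
    """Parse 'name::url' channel entries, skipping blank lines and comments."""
    channels = {}
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        name, _, url = line.partition("::")
        channels[name] = url
    return channels
```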
@@ -1,16 +0,0 @@
# ComfyUI-Manager: Core Backend (glob)

This directory contains the Python backend modules that power ComfyUI-Manager, handling the core functionality of node management, downloading, security, and server operations.

## Core Modules

- **manager_downloader.py**: Handles downloading operations for models, extensions, and other resources.
- **manager_util.py**: Provides utility functions used throughout the system.

## Specialized Modules

- **cm_global.py**: Maintains global variables and state management across the system.
- **cnr_utils.py**: Helper utilities for interacting with the custom node registry (CNR).
- **git_utils.py**: Git-specific utilities for repository operations.
- **node_package.py**: Handles the packaging and installation of node extensions.
- **security_check.py**: Implements the multi-level security system for installation safety.
@@ -1,108 +0,0 @@
import sys
import os
import logging
from . import manager_util
import toml
import git


# read env vars
comfy_path: str = os.environ.get('COMFYUI_PATH')
comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')

if comfy_path is None:
    try:
        comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
        os.environ['COMFYUI_PATH'] = comfy_path
    except Exception:
        logging.error("[ComfyUI-Manager] environment variable 'COMFYUI_PATH' is not specified.")
        exit(-1)

if comfy_base_path is None:
    comfy_base_path = comfy_path

channel_list_template_path = os.path.join(manager_util.comfyui_manager_path, 'channels.list.template')
git_script_path = os.path.join(manager_util.comfyui_manager_path, "git_helper.py")

manager_files_path = None
manager_config_path = None
manager_channel_list_path = None
manager_startup_script_path:str = None
manager_snapshot_path = None
manager_pip_overrides_path = None
manager_pip_blacklist_path = None
manager_components_path = None
manager_batch_history_path = None

def update_user_directory(user_dir):
    global manager_files_path
    global manager_config_path
    global manager_channel_list_path
    global manager_startup_script_path
    global manager_snapshot_path
    global manager_pip_overrides_path
    global manager_pip_blacklist_path
    global manager_components_path
    global manager_batch_history_path

    manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
    if not os.path.exists(manager_files_path):
        os.makedirs(manager_files_path)

    manager_snapshot_path = os.path.join(manager_files_path, "snapshots")
    if not os.path.exists(manager_snapshot_path):
        os.makedirs(manager_snapshot_path)

    manager_startup_script_path = os.path.join(manager_files_path, "startup-scripts")
    if not os.path.exists(manager_startup_script_path):
        os.makedirs(manager_startup_script_path)

    manager_config_path = os.path.join(manager_files_path, 'config.ini')
    manager_channel_list_path = os.path.join(manager_files_path, 'channels.list')
    manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
    manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
    manager_components_path = os.path.join(manager_files_path, "components")
    manager_util.cache_dir = os.path.join(manager_files_path, "cache")
    manager_batch_history_path = os.path.join(manager_files_path, "batch_history")

    if not os.path.exists(manager_util.cache_dir):
        os.makedirs(manager_util.cache_dir)

    if not os.path.exists(manager_batch_history_path):
        os.makedirs(manager_batch_history_path)

try:
    import folder_paths
    update_user_directory(folder_paths.get_user_directory())

except Exception:
    # fallback:
    # This case is only possible when running with cm-cli, and in practice, this case is not actually used.
    update_user_directory(os.path.abspath(manager_util.comfyui_manager_path))


def get_current_comfyui_ver():
    """
    Extract version from pyproject.toml
    """
    toml_path = os.path.join(comfy_path, 'pyproject.toml')
    if not os.path.exists(toml_path):
        return None
    else:
        try:
            with open(toml_path, "r", encoding="utf-8") as f:
                data = toml.load(f)

            project = data.get('project', {})
            return project.get('version')
        except Exception:
            return None


def get_comfyui_tag():
    try:
        with git.Repo(comfy_path) as repo:
            return repo.git.describe('--tags')
    except Exception:
        return None
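A small usage sketch for the version helpers defined above; the fallback string is an assumption added here for illustration:

```python
# Hedged usage sketch (assumes this module's namespace): prefer the pyproject.toml
# version, fall back to the git tag, then to a placeholder.
import logging

version = get_current_comfyui_ver() or get_comfyui_tag() or "unknown"
logging.info(f"[ComfyUI-Manager] detected ComfyUI version: {version}")
```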
@@ -1,18 +0,0 @@
import enum

class NetworkMode(enum.Enum):
    PUBLIC = "public"
    PRIVATE = "private"
    OFFLINE = "offline"
    PERSONAL_CLOUD = "personal_cloud"

class SecurityLevel(enum.Enum):
    STRONG = "strong"
    NORMAL = "normal"
    NORMAL_MINUS = "normal-minus"
    WEAK = "weak"

class DBMode(enum.Enum):
    LOCAL = "local"
    CACHE = "cache"
    REMOTE = "remote"
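A hedged sketch of mapping a raw config string onto one of the enums above; the fallback to `PUBLIC` is an assumption, not documented behavior:

```python
# Assumes the NetworkMode enum defined above is in scope.
def parse_network_mode(raw: str) -> "NetworkMode":
    try:
        return NetworkMode(raw.strip().lower())
    except ValueError:
        return NetworkMode.PUBLIC  # assumed default; adjust to the real config handling
```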
@@ -1,36 +0,0 @@
from enum import Enum

is_personal_cloud_mode = False
handler_policy = {}

class HANDLER_POLICY(Enum):
    MULTIPLE_REMOTE_BAN_NON_LOCAL = 1
    MULTIPLE_REMOTE_BAN_NOT_PERSONAL_CLOUD = 2
    BANNED = 3


def is_loopback(address):
    import ipaddress
    try:
        return ipaddress.ip_address(address).is_loopback
    except ValueError:
        return False


def do_nothing():
    pass


def get_handler_policy(x):
    return handler_policy.get(x) or set()

def add_handler_policy(x, policy):
    s = handler_policy.get(x)
    if s is None:
        s = set()
        handler_policy[x] = s

    s.add(policy)


multiple_remote_alert = do_nothing
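A usage sketch for the policy helpers above, with a hypothetical handler function for illustration:

```python
# The handler name here is hypothetical; policies are attached per handler object.
async def example_install_handler(request):
    ...

add_handler_policy(example_install_handler, HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NON_LOCAL)
assert HANDLER_POLICY.MULTIPLE_REMOTE_BAN_NON_LOCAL in get_handler_policy(example_install_handler)
```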
@@ -1,68 +0,0 @@
# Data Models

This directory contains Pydantic models for ComfyUI Manager, providing type safety, validation, and serialization for the API and internal data structures.

## Overview

- `generated_models.py` - All models auto-generated from OpenAPI spec
- `__init__.py` - Package exports for all models

**Note**: All models are now auto-generated from the OpenAPI specification. Manual model files (`task_queue.py`, `state_management.py`) have been deprecated in favor of a single source of truth.

## Generating Types from OpenAPI

The state management models are automatically generated from the OpenAPI specification using `datamodel-codegen`. This ensures type safety and consistency between the API specification and the Python code.

### Prerequisites

Install the code generator:
```bash
pipx install datamodel-code-generator
```

### Generation Command

To regenerate all models after updating the OpenAPI spec:

```bash
datamodel-codegen \
  --use-subclass-enum \
  --field-constraints \
  --strict-types bytes \
  --use-double-quotes \
  --input openapi.yaml \
  --output comfyui_manager/data_models/generated_models.py \
  --output-model-type pydantic_v2.BaseModel
```

### When to Regenerate

You should regenerate the models when:

1. **Adding new API endpoints** that return new data structures
2. **Modifying existing schemas** in the OpenAPI specification
3. **Adding new state management features** that require new models

### Important Notes

- **Single source of truth**: All models are now generated from `openapi.yaml`
- **No manual models**: All previously manual models have been migrated to the OpenAPI spec
- **OpenAPI requirements**: New schemas must be referenced in API paths to be generated by datamodel-codegen
- **Validation**: Always validate the OpenAPI spec before generation:
  ```bash
  python3 -c "import yaml; yaml.safe_load(open('openapi.yaml'))"
  ```

### Example: Adding New State Models

1. Add your schema to `openapi.yaml` under `components/schemas/`
2. Reference the schema in an API endpoint response
3. Run the generation command above
4. Update `__init__.py` to export the new models
5. Import and use the models in your code (a usage sketch follows below)
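A minimal usage sketch, assuming the generated `ManagerPackInfo` model shown later in `generated_models.py` (fields `id`, `version`, optional `ui_id`); the package id value is illustrative:

```python
from comfyui_manager.data_models import ManagerPackInfo

pack = ManagerPackInfo(id="example-author/example-pack", version="1.2.3")
payload = pack.model_dump(exclude_none=True)       # Pydantic v2 serialization
restored = ManagerPackInfo.model_validate(payload)  # round-trip validation
```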

### Troubleshooting

- **Models not generated**: Ensure schemas are under `components/schemas/` (not `parameters/`)
- **Missing models**: Verify schemas are referenced in at least one API path
- **Import errors**: Check that new models are added to `__init__.py` exports
@@ -1,139 +0,0 @@
|
|||||||
"""
|
|
||||||
Data models for ComfyUI Manager.
|
|
||||||
|
|
||||||
This package contains Pydantic models used throughout the ComfyUI Manager
|
|
||||||
for data validation, serialization, and type safety.
|
|
||||||
|
|
||||||
All models are auto-generated from the OpenAPI specification to ensure
|
|
||||||
consistency between the API and implementation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .generated_models import (
|
|
||||||
# Core Task Queue Models
|
|
||||||
QueueTaskItem,
|
|
||||||
TaskHistoryItem,
|
|
||||||
TaskStateMessage,
|
|
||||||
TaskExecutionStatus,
|
|
||||||
|
|
||||||
# WebSocket Message Models
|
|
||||||
MessageTaskDone,
|
|
||||||
MessageTaskStarted,
|
|
||||||
MessageTaskFailed,
|
|
||||||
MessageUpdate,
|
|
||||||
ManagerMessageName,
|
|
||||||
|
|
||||||
# State Management Models
|
|
||||||
BatchExecutionRecord,
|
|
||||||
ComfyUISystemState,
|
|
||||||
BatchOperation,
|
|
||||||
InstalledNodeInfo,
|
|
||||||
InstalledModelInfo,
|
|
||||||
ComfyUIVersionInfo,
|
|
||||||
|
|
||||||
# Import Fail Info Models
|
|
||||||
ImportFailInfoBulkRequest,
|
|
||||||
ImportFailInfoBulkResponse,
|
|
||||||
ImportFailInfoItem,
|
|
||||||
ImportFailInfoItem1,
|
|
||||||
|
|
||||||
# Other models
|
|
||||||
OperationType,
|
|
||||||
OperationResult,
|
|
||||||
ManagerPackInfo,
|
|
||||||
ManagerPackInstalled,
|
|
||||||
SelectedVersion,
|
|
||||||
ManagerChannel,
|
|
||||||
ManagerDatabaseSource,
|
|
||||||
ManagerPackState,
|
|
||||||
ManagerPackInstallType,
|
|
||||||
ManagerPack,
|
|
||||||
InstallPackParams,
|
|
||||||
UpdatePackParams,
|
|
||||||
UpdateAllPacksParams,
|
|
||||||
UpdateComfyUIParams,
|
|
||||||
FixPackParams,
|
|
||||||
UninstallPackParams,
|
|
||||||
DisablePackParams,
|
|
||||||
EnablePackParams,
|
|
||||||
UpdateAllQueryParams,
|
|
||||||
UpdateComfyUIQueryParams,
|
|
||||||
ComfyUISwitchVersionQueryParams,
|
|
||||||
QueueStatus,
|
|
||||||
ManagerMappings,
|
|
||||||
ModelMetadata,
|
|
||||||
NodePackageMetadata,
|
|
||||||
SnapshotItem,
|
|
||||||
Error,
|
|
||||||
InstalledPacksResponse,
|
|
||||||
HistoryResponse,
|
|
||||||
HistoryListResponse,
|
|
||||||
InstallType,
|
|
||||||
SecurityLevel,
|
|
||||||
RiskLevel,
|
|
||||||
NetworkMode
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
# Core Task Queue Models
|
|
||||||
"QueueTaskItem",
|
|
||||||
"TaskHistoryItem",
|
|
||||||
"TaskStateMessage",
|
|
||||||
"TaskExecutionStatus",
|
|
||||||
|
|
||||||
# WebSocket Message Models
|
|
||||||
"MessageTaskDone",
|
|
||||||
"MessageTaskStarted",
|
|
||||||
"MessageTaskFailed",
|
|
||||||
"MessageUpdate",
|
|
||||||
"ManagerMessageName",
|
|
||||||
|
|
||||||
# State Management Models
|
|
||||||
"BatchExecutionRecord",
|
|
||||||
"ComfyUISystemState",
|
|
||||||
"BatchOperation",
|
|
||||||
"InstalledNodeInfo",
|
|
||||||
"InstalledModelInfo",
|
|
||||||
"ComfyUIVersionInfo",
|
|
||||||
|
|
||||||
# Import Fail Info Models
|
|
||||||
"ImportFailInfoBulkRequest",
|
|
||||||
"ImportFailInfoBulkResponse",
|
|
||||||
"ImportFailInfoItem",
|
|
||||||
"ImportFailInfoItem1",
|
|
||||||
|
|
||||||
# Other models
|
|
||||||
"OperationType",
|
|
||||||
"OperationResult",
|
|
||||||
"ManagerPackInfo",
|
|
||||||
"ManagerPackInstalled",
|
|
||||||
"SelectedVersion",
|
|
||||||
"ManagerChannel",
|
|
||||||
"ManagerDatabaseSource",
|
|
||||||
"ManagerPackState",
|
|
||||||
"ManagerPackInstallType",
|
|
||||||
"ManagerPack",
|
|
||||||
"InstallPackParams",
|
|
||||||
"UpdatePackParams",
|
|
||||||
"UpdateAllPacksParams",
|
|
||||||
"UpdateComfyUIParams",
|
|
||||||
"FixPackParams",
|
|
||||||
"UninstallPackParams",
|
|
||||||
"DisablePackParams",
|
|
||||||
"EnablePackParams",
|
|
||||||
"UpdateAllQueryParams",
|
|
||||||
"UpdateComfyUIQueryParams",
|
|
||||||
"ComfyUISwitchVersionQueryParams",
|
|
||||||
"QueueStatus",
|
|
||||||
"ManagerMappings",
|
|
||||||
"ModelMetadata",
|
|
||||||
"NodePackageMetadata",
|
|
||||||
"SnapshotItem",
|
|
||||||
"Error",
|
|
||||||
"InstalledPacksResponse",
|
|
||||||
"HistoryResponse",
|
|
||||||
"HistoryListResponse",
|
|
||||||
"InstallType",
|
|
||||||
"SecurityLevel",
|
|
||||||
"RiskLevel",
|
|
||||||
"NetworkMode",
|
|
||||||
]
|
|
||||||
@@ -1,570 +0,0 @@
|
|||||||
# generated by datamodel-codegen:
|
|
||||||
# filename: openapi.yaml
|
|
||||||
# timestamp: 2025-11-01T04:21:38+00:00
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Any, Dict, List, Optional, Union
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field, RootModel
|
|
||||||
|
|
||||||
|
|
||||||
class OperationType(str, Enum):
|
|
||||||
install = "install"
|
|
||||||
uninstall = "uninstall"
|
|
||||||
update = "update"
|
|
||||||
update_comfyui = "update-comfyui"
|
|
||||||
fix = "fix"
|
|
||||||
disable = "disable"
|
|
||||||
enable = "enable"
|
|
||||||
install_model = "install-model"
|
|
||||||
|
|
||||||
|
|
||||||
class OperationResult(str, Enum):
|
|
||||||
success = "success"
|
|
||||||
failed = "failed"
|
|
||||||
skipped = "skipped"
|
|
||||||
error = "error"
|
|
||||||
skip = "skip"
|
|
||||||
|
|
||||||
|
|
||||||
class TaskExecutionStatus(BaseModel):
|
|
||||||
status_str: OperationResult
|
|
||||||
completed: bool = Field(..., description="Whether the task completed")
|
|
||||||
messages: List[str] = Field(..., description="Additional status messages")
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerMessageName(str, Enum):
|
|
||||||
cm_task_completed = "cm-task-completed"
|
|
||||||
cm_task_started = "cm-task-started"
|
|
||||||
cm_queue_status = "cm-queue-status"
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackInfo(BaseModel):
|
|
||||||
id: str = Field(
|
|
||||||
...,
|
|
||||||
description="Either github-author/github-repo or name of pack from the registry",
|
|
||||||
)
|
|
||||||
version: str = Field(..., description="Semantic version or Git commit hash")
|
|
||||||
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackInstalled(BaseModel):
|
|
||||||
ver: str = Field(
|
|
||||||
...,
|
|
||||||
description="The version of the pack that is installed (Git commit hash or semantic version)",
|
|
||||||
)
|
|
||||||
cnr_id: Optional[str] = Field(
|
|
||||||
None,
|
|
||||||
description="The name of the pack if installed from the registry (normalized lowercase)",
|
|
||||||
)
|
|
||||||
original_name: Optional[str] = Field(
|
|
||||||
None,
|
|
||||||
description="The original case-preserved name of the pack from the registry",
|
|
||||||
)
|
|
||||||
aux_id: Optional[str] = Field(
|
|
||||||
None,
|
|
||||||
description="The name of the pack if installed from github (author/repo-name format)",
|
|
||||||
)
|
|
||||||
enabled: bool = Field(..., description="Whether the pack is enabled")
|
|
||||||
|
|
||||||
|
|
||||||
class SelectedVersion(str, Enum):
|
|
||||||
latest = "latest"
|
|
||||||
nightly = "nightly"
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerChannel(str, Enum):
|
|
||||||
default = "default"
|
|
||||||
recent = "recent"
|
|
||||||
legacy = "legacy"
|
|
||||||
forked = "forked"
|
|
||||||
dev = "dev"
|
|
||||||
tutorial = "tutorial"
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerDatabaseSource(str, Enum):
|
|
||||||
remote = "remote"
|
|
||||||
local = "local"
|
|
||||||
cache = "cache"
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackState(str, Enum):
|
|
||||||
installed = "installed"
|
|
||||||
disabled = "disabled"
|
|
||||||
not_installed = "not_installed"
|
|
||||||
import_failed = "import_failed"
|
|
||||||
needs_update = "needs_update"
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerPackInstallType(str, Enum):
|
|
||||||
git_clone = "git-clone"
|
|
||||||
copy = "copy"
|
|
||||||
cnr = "cnr"
|
|
||||||
|
|
||||||
|
|
||||||
class SecurityLevel(str, Enum):
|
|
||||||
strong = "strong"
|
|
||||||
normal = "normal"
|
|
||||||
normal_ = "normal-"
|
|
||||||
weak = "weak"
|
|
||||||
|
|
||||||
|
|
||||||
class NetworkMode(str, Enum):
|
|
||||||
public = "public"
|
|
||||||
private = "private"
|
|
||||||
offline = "offline"
|
|
||||||
|
|
||||||
|
|
||||||
class RiskLevel(str, Enum):
|
|
||||||
block = "block"
|
|
||||||
high_ = "high+"
|
|
||||||
high = "high"
|
|
||||||
middle_ = "middle+"
|
|
||||||
middle = "middle"
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateState(Enum):
|
|
||||||
false = "false"
|
|
||||||
true = "true"
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerPack(ManagerPackInfo):
|
|
||||||
author: Optional[str] = Field(
|
|
||||||
None, description="Pack author name or 'Unclaimed' if added via GitHub crawl"
|
|
||||||
)
|
|
||||||
files: Optional[List[str]] = Field(
|
|
||||||
None,
|
|
||||||
description="Repository URLs for installation (typically contains one GitHub URL)",
|
|
||||||
)
|
|
||||||
reference: Optional[str] = Field(
|
|
||||||
None, description="The type of installation reference"
|
|
||||||
)
|
|
||||||
title: Optional[str] = Field(None, description="The display name of the pack")
|
|
||||||
cnr_latest: Optional[SelectedVersion] = None
|
|
||||||
repository: Optional[str] = Field(None, description="GitHub repository URL")
|
|
||||||
state: Optional[ManagerPackState] = None
|
|
||||||
update_state: Optional[UpdateState] = Field(
|
|
||||||
None, alias="update-state", description="Update availability status"
|
|
||||||
)
|
|
||||||
stars: Optional[int] = Field(None, description="GitHub stars count")
|
|
||||||
last_update: Optional[datetime] = Field(None, description="Last update timestamp")
|
|
||||||
health: Optional[str] = Field(None, description="Health status of the pack")
|
|
||||||
description: Optional[str] = Field(None, description="Pack description")
|
|
||||||
trust: Optional[bool] = Field(None, description="Whether the pack is trusted")
|
|
||||||
install_type: Optional[ManagerPackInstallType] = None
|
|
||||||
|
|
||||||
|
|
||||||
class InstallPackParams(ManagerPackInfo):
|
|
||||||
selected_version: Union[str, SelectedVersion] = Field(
|
|
||||||
..., description="Semantic version, Git commit hash, latest, or nightly"
|
|
||||||
)
|
|
||||||
repository: Optional[str] = Field(
|
|
||||||
None,
|
|
||||||
description="GitHub repository URL (required if selected_version is nightly)",
|
|
||||||
)
|
|
||||||
pip: Optional[List[str]] = Field(None, description="PyPi dependency names")
|
|
||||||
mode: Optional[ManagerDatabaseSource] = None
|
|
||||||
channel: Optional[ManagerChannel] = None
|
|
||||||
skip_post_install: Optional[bool] = Field(
|
|
||||||
None, description="Whether to skip post-installation steps"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateAllPacksParams(BaseModel):
|
|
||||||
mode: Optional[ManagerDatabaseSource] = None
|
|
||||||
ui_id: Optional[str] = Field(None, description="Task ID - generated internally")
|
|
||||||
|
|
||||||
|
|
||||||
class UpdatePackParams(BaseModel):
|
|
||||||
node_name: str = Field(..., description="Name of the node package to update")
|
|
||||||
node_ver: Optional[str] = Field(
|
|
||||||
None, description="Current version of the node package"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateComfyUIParams(BaseModel):
|
|
||||||
is_stable: Optional[bool] = Field(
|
|
||||||
True,
|
|
||||||
description="Whether to update to stable version (true) or nightly (false)",
|
|
||||||
)
|
|
||||||
target_version: Optional[str] = Field(
|
|
||||||
None,
|
|
||||||
description="Specific version to switch to (for version switching operations)",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FixPackParams(BaseModel):
|
|
||||||
node_name: str = Field(..., description="Name of the node package to fix")
|
|
||||||
node_ver: str = Field(..., description="Version of the node package")
|
|
||||||
|
|
||||||
|
|
||||||
class UninstallPackParams(BaseModel):
|
|
||||||
node_name: str = Field(..., description="Name of the node package to uninstall")
|
|
||||||
is_unknown: Optional[bool] = Field(
|
|
||||||
False, description="Whether this is an unknown/unregistered package"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DisablePackParams(BaseModel):
|
|
||||||
node_name: str = Field(..., description="Name of the node package to disable")
|
|
||||||
is_unknown: Optional[bool] = Field(
|
|
||||||
False, description="Whether this is an unknown/unregistered package"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class EnablePackParams(BaseModel):
|
|
||||||
cnr_id: str = Field(
|
|
||||||
..., description="ComfyUI Node Registry ID of the package to enable"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateAllQueryParams(BaseModel):
|
|
||||||
client_id: str = Field(
|
|
||||||
..., description="Client identifier that initiated the request"
|
|
||||||
)
|
|
||||||
ui_id: str = Field(..., description="Base UI identifier for task tracking")
|
|
||||||
mode: Optional[ManagerDatabaseSource] = None
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateComfyUIQueryParams(BaseModel):
|
|
||||||
client_id: str = Field(
|
|
||||||
..., description="Client identifier that initiated the request"
|
|
||||||
)
|
|
||||||
ui_id: str = Field(..., description="UI identifier for task tracking")
|
|
||||||
stable: Optional[bool] = Field(
|
|
||||||
True,
|
|
||||||
description="Whether to update to stable version (true) or nightly (false)",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ComfyUISwitchVersionQueryParams(BaseModel):
|
|
||||||
ver: str = Field(..., description="Version to switch to")
|
|
||||||
client_id: str = Field(
|
|
||||||
..., description="Client identifier that initiated the request"
|
|
||||||
)
|
|
||||||
ui_id: str = Field(..., description="UI identifier for task tracking")
|
|
||||||
|
|
||||||
|
|
||||||
class QueueStatus(BaseModel):
|
|
||||||
total_count: int = Field(
|
|
||||||
..., description="Total number of tasks (pending + running)"
|
|
||||||
)
|
|
||||||
done_count: int = Field(..., description="Number of completed tasks")
|
|
||||||
in_progress_count: int = Field(..., description="Number of tasks currently running")
|
|
||||||
pending_count: Optional[int] = Field(
|
|
||||||
None, description="Number of tasks waiting to be executed"
|
|
||||||
)
|
|
||||||
is_processing: bool = Field(..., description="Whether the task worker is active")
|
|
||||||
client_id: Optional[str] = Field(
|
|
||||||
None, description="Client ID (when filtered by client)"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerMappings1(BaseModel):
|
|
||||||
title_aux: Optional[str] = Field(None, description="The display name of the pack")
|
|
||||||
|
|
||||||
|
|
||||||
class ManagerMappings(
|
|
||||||
RootModel[Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]]]
|
|
||||||
):
|
|
||||||
root: Optional[Dict[str, List[Union[List[str], ManagerMappings1]]]] = Field(
|
|
||||||
None, description="Tuple of [node_names, metadata]"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ModelMetadata(BaseModel):
|
|
||||||
name: str = Field(..., description="Name of the model")
|
|
||||||
type: str = Field(..., description="Type of model")
|
|
||||||
base: Optional[str] = Field(None, description="Base model type")
|
|
||||||
save_path: Optional[str] = Field(None, description="Path for saving the model")
|
|
||||||
url: str = Field(..., description="Download URL")
|
|
||||||
filename: str = Field(..., description="Target filename")
|
|
||||||
ui_id: Optional[str] = Field(None, description="ID for UI reference")
|
|
||||||
|
|
||||||
|
|
||||||
class InstallType(str, Enum):
|
|
||||||
git = "git"
|
|
||||||
copy = "copy"
|
|
||||||
pip = "pip"
|
|
||||||
|
|
||||||
|
|
||||||
class NodePackageMetadata(BaseModel):
|
|
||||||
title: Optional[str] = Field(None, description="Display name of the node package")
|
|
||||||
name: Optional[str] = Field(None, description="Repository/package name")
|
|
||||||
files: Optional[List[str]] = Field(None, description="Source URLs for the package")
|
|
||||||
description: Optional[str] = Field(
|
|
||||||
None, description="Description of the node package functionality"
|
|
||||||
)
|
|
||||||
install_type: Optional[InstallType] = Field(None, description="Installation method")
|
|
||||||
version: Optional[str] = Field(None, description="Version identifier")
|
|
||||||
id: Optional[str] = Field(
|
|
||||||
None, description="Unique identifier for the node package"
|
|
||||||
)
|
|
||||||
ui_id: Optional[str] = Field(None, description="ID for UI reference")
|
|
||||||
channel: Optional[str] = Field(None, description="Source channel")
|
|
||||||
mode: Optional[str] = Field(None, description="Source mode")
|
|
||||||
|
|
||||||
|
|
||||||
class SnapshotItem(RootModel[str]):
|
|
||||||
root: str = Field(..., description="Name of the snapshot")
|
|
||||||
|
|
||||||
|
|
||||||
class Error(BaseModel):
|
|
||||||
error: str = Field(..., description="Error message")
|
|
||||||
|
|
||||||
|
|
||||||
class InstalledPacksResponse(RootModel[Optional[Dict[str, ManagerPackInstalled]]]):
|
|
||||||
root: Optional[Dict[str, ManagerPackInstalled]] = None
|
|
||||||
|
|
||||||
|
|
||||||
class HistoryListResponse(BaseModel):
|
|
||||||
ids: Optional[List[str]] = Field(
|
|
||||||
None, description="List of available batch history IDs"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class InstalledNodeInfo(BaseModel):
|
|
||||||
name: str = Field(..., description="Node package name")
|
|
||||||
version: str = Field(..., description="Installed version")
|
|
||||||
repository_url: Optional[str] = Field(None, description="Git repository URL")
|
|
||||||
install_method: str = Field(
|
|
||||||
..., description="Installation method (cnr, git, pip, etc.)"
|
|
||||||
)
|
|
||||||
enabled: Optional[bool] = Field(
|
|
||||||
True, description="Whether the node is currently enabled"
|
|
||||||
)
|
|
||||||
install_date: Optional[datetime] = Field(
|
|
||||||
None, description="ISO timestamp of installation"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class InstalledModelInfo(BaseModel):
|
|
||||||
name: str = Field(..., description="Model filename")
|
|
||||||
path: str = Field(..., description="Full path to model file")
|
|
||||||
type: str = Field(..., description="Model type (checkpoint, lora, vae, etc.)")
|
|
||||||
size_bytes: Optional[int] = Field(None, description="File size in bytes", ge=0)
|
|
||||||
hash: Optional[str] = Field(None, description="Model file hash for verification")
|
|
||||||
install_date: Optional[datetime] = Field(
|
|
||||||
None, description="ISO timestamp when added"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ComfyUIVersionInfo(BaseModel):
|
|
||||||
version: str = Field(..., description="ComfyUI version string")
|
|
||||||
commit_hash: Optional[str] = Field(None, description="Git commit hash")
|
|
||||||
branch: Optional[str] = Field(None, description="Git branch name")
|
|
||||||
is_stable: Optional[bool] = Field(
|
|
||||||
False, description="Whether this is a stable release"
|
|
||||||
)
|
|
||||||
last_updated: Optional[datetime] = Field(
|
|
||||||
None, description="ISO timestamp of last update"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BatchOperation(BaseModel):
|
|
||||||
operation_id: str = Field(..., description="Unique operation identifier")
|
|
||||||
operation_type: OperationType
|
|
||||||
target: str = Field(
|
|
||||||
..., description="Target of the operation (node name, model name, etc.)"
|
|
||||||
)
|
|
||||||
target_version: Optional[str] = Field(
|
|
||||||
None, description="Target version for the operation"
|
|
||||||
)
|
|
||||||
result: OperationResult
|
|
||||||
error_message: Optional[str] = Field(
|
|
||||||
None, description="Error message if operation failed"
|
|
||||||
)
|
|
||||||
start_time: datetime = Field(
|
|
||||||
..., description="ISO timestamp when operation started"
|
|
||||||
)
|
|
||||||
end_time: Optional[datetime] = Field(
|
|
||||||
None, description="ISO timestamp when operation completed"
|
|
||||||
)
|
|
||||||
client_id: Optional[str] = Field(
|
|
||||||
None, description="Client that initiated the operation"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ComfyUISystemState(BaseModel):
|
|
||||||
snapshot_time: datetime = Field(
|
|
||||||
..., description="ISO timestamp when snapshot was taken"
|
|
||||||
)
|
|
||||||
comfyui_version: ComfyUIVersionInfo
|
|
||||||
frontend_version: Optional[str] = Field(
|
|
||||||
None, description="ComfyUI frontend version if available"
|
|
||||||
)
|
|
||||||
python_version: str = Field(..., description="Python interpreter version")
|
|
||||||
platform_info: str = Field(
|
|
||||||
..., description="Operating system and platform information"
|
|
||||||
)
|
|
||||||
installed_nodes: Optional[Dict[str, InstalledNodeInfo]] = Field(
|
|
||||||
None, description="Map of installed node packages by name"
|
|
||||||
)
|
|
||||||
installed_models: Optional[Dict[str, InstalledModelInfo]] = Field(
|
|
||||||
None, description="Map of installed models by name"
|
|
||||||
)
|
|
||||||
manager_config: Optional[Dict[str, Any]] = Field(
|
|
||||||
None, description="ComfyUI Manager configuration settings"
|
|
||||||
)
|
|
||||||
comfyui_root_path: Optional[str] = Field(
|
|
||||||
None, description="ComfyUI root installation directory"
|
|
||||||
)
|
|
||||||
model_paths: Optional[Dict[str, List[str]]] = Field(
|
|
||||||
None, description="Map of model types to their configured paths"
|
|
||||||
)
|
|
||||||
manager_version: Optional[str] = Field(None, description="ComfyUI Manager version")
|
|
||||||
security_level: Optional[SecurityLevel] = None
|
|
||||||
network_mode: Optional[NetworkMode] = None
|
|
||||||
cli_args: Optional[Dict[str, Any]] = Field(
|
|
||||||
None, description="Selected ComfyUI CLI arguments"
|
|
||||||
)
|
|
||||||
custom_nodes_count: Optional[int] = Field(
|
|
||||||
None, description="Total number of custom node packages", ge=0
|
|
||||||
)
|
|
||||||
failed_imports: Optional[List[str]] = Field(
|
|
||||||
None, description="List of custom nodes that failed to import"
|
|
||||||
)
|
|
||||||
pip_packages: Optional[Dict[str, str]] = Field(
|
|
||||||
None, description="Map of installed pip packages to their versions"
|
|
||||||
)
|
|
||||||
embedded_python: Optional[bool] = Field(
|
|
||||||
None,
|
|
||||||
description="Whether ComfyUI is running from an embedded Python distribution",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BatchExecutionRecord(BaseModel):
|
|
||||||
batch_id: str = Field(..., description="Unique batch identifier")
|
|
||||||
start_time: datetime = Field(..., description="ISO timestamp when batch started")
|
|
||||||
end_time: Optional[datetime] = Field(
|
|
||||||
None, description="ISO timestamp when batch completed"
|
|
||||||
)
|
|
||||||
state_before: ComfyUISystemState
|
|
||||||
state_after: Optional[ComfyUISystemState] = Field(
|
|
||||||
None, description="System state after batch execution"
|
|
||||||
)
|
|
||||||
operations: Optional[List[BatchOperation]] = Field(
|
|
||||||
None, description="List of operations performed in this batch"
|
|
||||||
)
|
|
||||||
total_operations: Optional[int] = Field(
|
|
||||||
0, description="Total number of operations in batch", ge=0
|
|
||||||
)
|
|
||||||
successful_operations: Optional[int] = Field(
|
|
||||||
0, description="Number of successful operations", ge=0
|
|
||||||
)
|
|
||||||
failed_operations: Optional[int] = Field(
|
|
||||||
0, description="Number of failed operations", ge=0
|
|
||||||
)
|
|
||||||
skipped_operations: Optional[int] = Field(
|
|
||||||
0, description="Number of skipped operations", ge=0
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ImportFailInfoBulkRequest(BaseModel):
|
|
||||||
cnr_ids: Optional[List[str]] = Field(
|
|
||||||
None, description="A list of CNR IDs to check."
|
|
||||||
)
|
|
||||||
urls: Optional[List[str]] = Field(
|
|
||||||
None, description="A list of repository URLs to check."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ImportFailInfoItem1(BaseModel):
|
|
||||||
error: Optional[str] = None
|
|
||||||
traceback: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class ImportFailInfoItem(RootModel[Optional[ImportFailInfoItem1]]):
|
|
||||||
root: Optional[ImportFailInfoItem1]
|
|
||||||
|
|
||||||
|
|
||||||
class QueueTaskItem(BaseModel):
|
|
||||||
ui_id: str = Field(..., description="Unique identifier for the task")
|
|
||||||
client_id: str = Field(..., description="Client identifier that initiated the task")
|
|
||||||
kind: OperationType
|
|
||||||
params: Union[
|
|
||||||
InstallPackParams,
|
|
||||||
UpdatePackParams,
|
|
||||||
FixPackParams,
|
|
||||||
UninstallPackParams,
|
|
||||||
DisablePackParams,
|
|
||||||
EnablePackParams,
|
|
||||||
ModelMetadata,
|
|
||||||
UpdateComfyUIParams,
|
|
||||||
UpdateAllPacksParams,
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class TaskHistoryItem(BaseModel):
|
|
||||||
ui_id: str = Field(..., description="Unique identifier for the task")
|
|
||||||
client_id: str = Field(..., description="Client identifier that initiated the task")
|
|
||||||
kind: str = Field(..., description="Type of task that was performed")
|
|
||||||
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
|
|
||||||
result: str = Field(..., description="Task result message or details")
|
|
||||||
status: Optional[TaskExecutionStatus] = None
|
|
||||||
batch_id: Optional[str] = Field(
|
|
||||||
None, description="ID of the batch this task belongs to"
|
|
||||||
)
|
|
||||||
end_time: Optional[datetime] = Field(
|
|
||||||
None, description="ISO timestamp when task execution ended"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TaskStateMessage(BaseModel):
|
|
||||||
history: Dict[str, TaskHistoryItem] = Field(
|
|
||||||
..., description="Map of task IDs to their history items"
|
|
||||||
)
|
|
||||||
running_queue: List[QueueTaskItem] = Field(
|
|
||||||
..., description="Currently executing tasks"
|
|
||||||
)
|
|
||||||
pending_queue: List[QueueTaskItem] = Field(
|
|
||||||
..., description="Tasks waiting to be executed"
|
|
||||||
)
|
|
||||||
installed_packs: Dict[str, ManagerPackInstalled] = Field(
|
|
||||||
..., description="Map of currently installed node packages by name"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MessageTaskDone(BaseModel):
|
|
||||||
ui_id: str = Field(..., description="Task identifier")
|
|
||||||
result: str = Field(..., description="Task result message")
|
|
||||||
kind: str = Field(..., description="Type of task")
|
|
||||||
status: Optional[TaskExecutionStatus] = None
|
|
||||||
timestamp: datetime = Field(..., description="ISO timestamp when task completed")
|
|
||||||
state: TaskStateMessage
|
|
||||||
|
|
||||||
|
|
||||||
class MessageTaskStarted(BaseModel):
|
|
||||||
ui_id: str = Field(..., description="Task identifier")
|
|
||||||
kind: str = Field(..., description="Type of task")
|
|
||||||
timestamp: datetime = Field(..., description="ISO timestamp when task started")
|
|
||||||
state: TaskStateMessage
|
|
||||||
|
|
||||||
|
|
||||||
class MessageTaskFailed(BaseModel):
|
|
||||||
ui_id: str = Field(..., description="Task identifier")
|
|
||||||
error: str = Field(..., description="Error message")
|
|
||||||
kind: str = Field(..., description="Type of task")
|
|
||||||
timestamp: datetime = Field(..., description="ISO timestamp when task failed")
|
|
||||||
state: TaskStateMessage
|
|
||||||
|
|
||||||
|
|
||||||
class MessageUpdate(
|
|
||||||
RootModel[Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed]]
|
|
||||||
):
|
|
||||||
root: Union[MessageTaskDone, MessageTaskStarted, MessageTaskFailed] = Field(
|
|
||||||
..., description="Union type for all possible WebSocket message updates"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class HistoryResponse(BaseModel):
|
|
||||||
history: Optional[Dict[str, TaskHistoryItem]] = Field(
|
|
||||||
None, description="Map of task IDs to their history items"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ImportFailInfoBulkResponse(RootModel[Optional[Dict[str, ImportFailInfoItem]]]):
|
|
||||||
root: Optional[Dict[str, ImportFailInfoItem]] = None
|
|
||||||
@@ -1,11 +0,0 @@
- Anytime you change the data being sent or received, follow this process:
1. Adjust the openapi.yaml file first
2. Verify the syntax of the openapi.yaml file using `yaml.safe_load` (see the sketch after this list)
3. Regenerate the types following the instructions in the `data_models/README.md` file
4. Verify that the new data model is generated
5. Verify the syntax of the generated types files
6. Run formatting and linting on the generated types files
7. Adjust the `__init__.py` files in the `data_models` directory to match/export the new data model
8. Only then make the changes to the rest of the codebase
9. Run the CI tests to verify that the changes work
- The comfyui_manager package manages the ComfyUI server. It contains two sub-packages, `glob` (the current version) and `legacy` (the previous version), alongside common utilities and data models. Development happens in the `glob` package. Ignore the `legacy` package entirely unless you have a very good reason to research how things were done in prior major versions; even then, look only for knowledge or reflection, not to change code (unless explicitly asked to do so).
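A minimal sketch of the syntax check in step 2, assuming the spec sits at the repository root as openapi.yaml:

# Step 2: verify that openapi.yaml is well-formed YAML.
# This only checks YAML syntax, not OpenAPI validity.
import yaml

with open("openapi.yaml", "r", encoding="utf-8") as f:
    spec = yaml.safe_load(f)

print(f"parsed OK: {len(spec.get('paths', {}))} paths defined")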
@@ -1,55 +0,0 @@
SECURITY_MESSAGE_MIDDLE = "ERROR: To use this action, a security_level of `normal or below` is required. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_MIDDLE_P = "ERROR: To use this action, security_level must be `normal or below`, and network_mode must be set to `personal_cloud`. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS = "ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_GENERAL = "ERROR: This installation is not allowed in this security_level. Please contact the administrator.\nReference: https://github.com/ltdrdata/ComfyUI-Manager#security-policy"
SECURITY_MESSAGE_NORMAL_MINUS_MODEL = "ERROR: Downloading models that are not in '.safetensors' format is only allowed for models registered in the 'default' channel at this security level. If you want to download this model, set the security level to 'normal-' or lower."


def is_loopback(address):
    import ipaddress

    try:
        return ipaddress.ip_address(address).is_loopback
    except ValueError:
        return False


model_dir_name_map = {
    "checkpoints": "checkpoints",
    "checkpoint": "checkpoints",
    "unclip": "checkpoints",
    "text_encoders": "text_encoders",
    "clip": "text_encoders",
    "vae": "vae",
    "lora": "loras",
    "t2i-adapter": "controlnet",
    "t2i-style": "controlnet",
    "controlnet": "controlnet",
    "clip_vision": "clip_vision",
    "gligen": "gligen",
    "upscale": "upscale_models",
    "embedding": "embeddings",
    "embeddings": "embeddings",
    "unet": "diffusion_models",
    "diffusion_model": "diffusion_models",
}

# List of all model directory names used for checking installed models
MODEL_DIR_NAMES = [
    "checkpoints",
    "loras",
    "vae",
    "text_encoders",
    "diffusion_models",
    "clip_vision",
    "embeddings",
    "diffusers",
    "vae_approx",
    "controlnet",
    "gligen",
    "upscale_models",
    "hypernetworks",
    "photomaker",
    "classifiers",
]
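A tiny illustrative helper (hypothetical, not part of the original module) showing how model_dir_name_map resolves a model entry's declared type to a folder name, with the same "etc" fallback the manager uses elsewhere for unknown types:

def resolve_model_dir_name(model_type: str, default: str = "etc") -> str:
    # Types are matched case-insensitively; unknown types fall back to "etc".
    return model_dir_name_map.get(model_type.lower(), default)

print(resolve_model_dir_name("LoRA"))     # -> "loras"
print(resolve_model_dir_name("unknown"))  # -> "etc"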
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,451 +0,0 @@
import mimetypes
from ..common import context
from . import manager_core as core

import os
from aiohttp import web
import aiohttp
import json
import hashlib

import folder_paths
from server import PromptServer
import logging
import sys


try:
    from nio import AsyncClient, LoginResponse, UploadResponse
    matrix_nio_is_available = True
except Exception:
    logging.warning(f"[ComfyUI-Manager] The matrix sharing feature has been disabled because the `matrix-nio` dependency is not installed.\n\tTo use this feature, please run the following command:\n\t{sys.executable} -m pip install matrix-nio\n")
    matrix_nio_is_available = False


def extract_model_file_names(json_data):
    """Extract unique file names from the input JSON data."""
    file_names = set()
    model_filename_extensions = {'.safetensors', '.ckpt', '.pt', '.pth', '.bin'}

    # Recursively search for file names in the JSON data
    def recursive_search(data):
        if isinstance(data, dict):
            for value in data.values():
                recursive_search(value)
        elif isinstance(data, list):
            for item in data:
                recursive_search(item)
        elif isinstance(data, str) and '.' in data:
            file_names.add(os.path.basename(data))  # file_names.add(data)

    recursive_search(json_data)
    return [f for f in list(file_names) if os.path.splitext(f)[1] in model_filename_extensions]


def find_file_paths(base_dir, file_names):
    """Find the paths of the files in the base directory."""
    file_paths = {}

    for root, dirs, files in os.walk(base_dir):
        # Exclude certain directories
        dirs[:] = [d for d in dirs if d not in ['.git']]

        for file in files:
            if file in file_names:
                file_paths[file] = os.path.join(root, file)
    return file_paths


def compute_sha256_checksum(filepath):
    """Compute the SHA256 checksum of a file, in chunks"""
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            sha256.update(chunk)
    return sha256.hexdigest()
@PromptServer.instance.routes.get("/v2/manager/share_option")
async def share_option(request):
    if "value" in request.rel_url.query:
        core.get_config()['share_option'] = request.rel_url.query['value']
        core.write_config()
    else:
        return web.Response(text=core.get_config()['share_option'], status=200)

    return web.Response(status=200)


def get_openart_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
            openart_key = f.read().strip()
        return openart_key if openart_key else None
    except Exception:
        return None


def get_matrix_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
            matrix_auth = f.read()
        homeserver, username, password = matrix_auth.strip().split("\n")
        if not homeserver or not username or not password:
            return None
        return {
            "homeserver": homeserver,
            "username": username,
            "password": password,
        }
    except Exception:
        return None


def get_comfyworkflows_auth():
    if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
            share_key = f.read()
        if not share_key.strip():
            return None
        return share_key
    except Exception:
        return None


def get_youml_settings():
    if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
        return None
    try:
        with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
            youml_settings = f.read().strip()
        return youml_settings if youml_settings else None
    except Exception:
        return None


def set_youml_settings(settings):
    with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
        f.write(settings)


@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
async def api_get_openart_auth(request):
    # print("Getting stored Matrix credentials...")
    openart_key = get_openart_auth()
    if not openart_key:
        return web.Response(status=404)
    return web.json_response({"openart_key": openart_key})


@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
async def api_set_openart_auth(request):
    json_data = await request.json()
    openart_key = json_data['openart_key']
    with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
        f.write(openart_key)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
    # print("Getting stored Matrix credentials...")
    matrix_auth = get_matrix_auth()
    if not matrix_auth:
        return web.Response(status=404)
    return web.json_response(matrix_auth)


@PromptServer.instance.routes.get("/v2/manager/youml/settings")
async def api_get_youml_settings(request):
    youml_settings = get_youml_settings()
    if not youml_settings:
        return web.Response(status=404)
    return web.json_response(json.loads(youml_settings))


@PromptServer.instance.routes.post("/v2/manager/youml/settings")
async def api_set_youml_settings(request):
    json_data = await request.json()
    set_youml_settings(json.dumps(json_data))
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
    # Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
    # in the same directory as the ComfyUI base folder
    # print("Getting stored Comfyworkflows.com auth...")
    comfyworkflows_auth = get_comfyworkflows_auth()
    if not comfyworkflows_auth:
        return web.Response(status=404)
    return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})


@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
    json_data = await request.json()
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
        json.dump(json_data, file, indent=4)
    return web.Response(status=200)


@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
        data = json.load(file)
        return web.Response(status=200, text=json.dumps(data))


@PromptServer.instance.routes.get("/v2/manager/get_matrix_dep_status")
async def get_matrix_dep_status(request):
    if matrix_nio_is_available:
        return web.Response(status=200, text='available')
    else:
        return web.Response(status=200, text='unavailable')


def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
    password = json_data['password']
    with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
        f.write("\n".join([homeserver, username, password]))


def set_comfyworkflows_auth(comfyworkflows_sharekey):
    with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
        f.write(comfyworkflows_sharekey)


def has_provided_matrix_auth(matrix_auth):
    return matrix_auth['homeserver'].strip() and matrix_auth['username'].strip() and matrix_auth['password'].strip()


def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
    return comfyworkflows_sharekey.strip()
@PromptServer.instance.routes.post("/v2/manager/share")
async def share_art(request):
    # get json data
    json_data = await request.json()

    matrix_auth = json_data['matrix_auth']
    comfyworkflows_sharekey = json_data['cw_auth']['cw_sharekey']

    set_matrix_auth(matrix_auth)
    set_comfyworkflows_auth(comfyworkflows_sharekey)

    share_destinations = json_data['share_destinations']
    credits = json_data['credits']
    title = json_data['title']
    description = json_data['description']
    is_nsfw = json_data['is_nsfw']
    prompt = json_data['prompt']
    potential_outputs = json_data['potential_outputs']
    selected_output_index = json_data['selected_output_index']

    try:
        output_to_share = potential_outputs[int(selected_output_index)]
    except Exception:
        # for now, pick the first output
        output_to_share = potential_outputs[0]

    assert output_to_share['type'] in ('image', 'output')
    output_dir = folder_paths.get_output_directory()

    if output_to_share['type'] == 'image':
        asset_filename = output_to_share['image']['filename']
        asset_subfolder = output_to_share['image']['subfolder']

        if output_to_share['image']['type'] == 'temp':
            output_dir = folder_paths.get_temp_directory()
    else:
        asset_filename = output_to_share['output']['filename']
        asset_subfolder = output_to_share['output']['subfolder']

    if asset_subfolder:
        asset_filepath = os.path.join(output_dir, asset_subfolder, asset_filename)
    else:
        asset_filepath = os.path.join(output_dir, asset_filename)

    # get the mime type of the asset
    assetFileType = mimetypes.guess_type(asset_filepath)[0]

    share_website_host = "UNKNOWN"
    if "comfyworkflows" in share_destinations:
        share_website_host = "https://comfyworkflows.com"
        share_endpoint = f"{share_website_host}/api"

        # get presigned urls
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.post(
                f"{share_endpoint}/get_presigned_urls",
                json={
                    "assetFileName": asset_filename,
                    "assetFileType": assetFileType,
                    "workflowJsonFileName": 'workflow.json',
                    "workflowJsonFileType": 'application/json',
                },
            ) as resp:
                assert resp.status == 200
                presigned_urls_json = await resp.json()
                assetFilePresignedUrl = presigned_urls_json["assetFilePresignedUrl"]
                assetFileKey = presigned_urls_json["assetFileKey"]
                workflowJsonFilePresignedUrl = presigned_urls_json["workflowJsonFilePresignedUrl"]
                workflowJsonFileKey = presigned_urls_json["workflowJsonFileKey"]

        # upload asset
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(assetFilePresignedUrl, data=open(asset_filepath, "rb")) as resp:
                assert resp.status == 200

        # upload workflow json
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(workflowJsonFilePresignedUrl, data=json.dumps(prompt['workflow']).encode('utf-8')) as resp:
                assert resp.status == 200

        model_filenames = extract_model_file_names(prompt['workflow'])
        model_file_paths = find_file_paths(folder_paths.base_path, model_filenames)

        models_info = {}
        for filename, filepath in model_file_paths.items():
            models_info[filename] = {
                "filename": filename,
                "sha256_checksum": compute_sha256_checksum(filepath),
                "relative_path": os.path.relpath(filepath, folder_paths.base_path),
            }

        # make a POST request to /api/upload_workflow with form data key values
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            form = aiohttp.FormData()
            if comfyworkflows_sharekey:
                form.add_field("shareKey", comfyworkflows_sharekey)
            form.add_field("source", "comfyui_manager")
            form.add_field("assetFileKey", assetFileKey)
            form.add_field("assetFileType", assetFileType)
            form.add_field("workflowJsonFileKey", workflowJsonFileKey)
            form.add_field("sharedWorkflowWorkflowJsonString", json.dumps(prompt['workflow']))
            form.add_field("sharedWorkflowPromptJsonString", json.dumps(prompt['output']))
            form.add_field("shareWorkflowCredits", credits)
            form.add_field("shareWorkflowTitle", title)
            form.add_field("shareWorkflowDescription", description)
            form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
            form.add_field("currentSnapshot", json.dumps(await core.get_current_snapshot()))
            form.add_field("modelsInfo", json.dumps(models_info))

            async with session.post(
                f"{share_endpoint}/upload_workflow",
                data=form,
            ) as resp:
                assert resp.status == 200
                upload_workflow_json = await resp.json()
                workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if matrix_nio_is_available and "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
            homeserver = homeserver.replace("http://", "https://")
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = AsyncClient(homeserver, matrix_auth['username'])

            # Login
            login_resp = await client.login(matrix_auth['password'])
            if not isinstance(login_resp, LoginResponse) or not login_resp.access_token:
                await client.close()
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            # Upload asset
            with open(asset_filepath, 'rb') as f:
                upload_resp, _maybe_keys = await client.upload(f, content_type=content_type, filename=filename)
                asset_data = f.seek(0) or f.read()  # get size for info below
            if not isinstance(upload_resp, UploadResponse) or not upload_resp.content_uri:
                await client.close()
                return web.json_response({"error": "Failed to upload asset to Matrix."}, content_type='application/json', status=500)
            mxc_url = upload_resp.content_uri

            # Upload workflow JSON
            import io
            workflow_json_bytes = json.dumps(prompt['workflow']).encode('utf-8')
            workflow_io = io.BytesIO(workflow_json_bytes)
            upload_workflow_resp, _maybe_keys = await client.upload(workflow_io, content_type='application/json', filename='workflow.json')
            workflow_io.seek(0)
            if not isinstance(upload_workflow_resp, UploadResponse) or not upload_workflow_resp.content_uri:
                await client.close()
                return web.json_response({"error": "Failed to upload workflow to Matrix."}, content_type='application/json', status=500)
            workflow_json_mxc_url = upload_workflow_resp.content_uri

            # Send text message
            text_content = ""
            if title:
                text_content += f"{title}\n"
            if description:
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={"msgtype": "m.text", "body": text_content}
            )

            # Send image
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={
                    "msgtype": "m.image",
                    "body": filename,
                    "url": mxc_url,
                    "info": {
                        "mimetype": content_type,
                        "size": len(asset_data)
                    }
                }
            )

            # Send workflow JSON file
            await client.room_send(
                room_id=comfyui_share_room_id,
                message_type="m.room.message",
                content={
                    "msgtype": "m.file",
                    "body": "workflow.json",
                    "url": workflow_json_mxc_url,
                    "info": {
                        "mimetype": "application/json",
                        "size": len(workflow_json_bytes)
                    }
                }
            )

            await client.close()

        except:
            import traceback
            traceback.print_exc()
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)

    return web.json_response({
        "comfyworkflows": {
            "url": None if "comfyworkflows" not in share_destinations else f"{share_website_host}/workflows/{workflowId}",
        },
        "matrix": {
            "success": None if "matrix" not in share_destinations else True
        }
    }, content_type='application/json', status=200)
@@ -1,142 +0,0 @@
import os
import git
import logging
import traceback

from comfyui_manager.common import context
import folder_paths
from comfy.cli_args import args
import latent_preview

from comfyui_manager.glob import manager_core as core
from comfyui_manager.common import cm_global


comfy_ui_hash = "-"
comfyui_tag = None


def print_comfyui_version():
    global comfy_ui_hash
    global comfyui_tag

    is_detached = False
    try:
        repo = git.Repo(os.path.dirname(folder_paths.__file__))
        core.comfy_ui_revision = len(list(repo.iter_commits("HEAD")))

        comfy_ui_hash = repo.head.commit.hexsha
        cm_global.variables["comfyui.revision"] = core.comfy_ui_revision

        core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime
        cm_global.variables["comfyui.commit_datetime"] = core.comfy_ui_commit_datetime

        is_detached = repo.head.is_detached
        current_branch = repo.active_branch.name

        comfyui_tag = context.get_comfyui_tag()

        try:
            if (
                not os.environ.get("__COMFYUI_DESKTOP_VERSION__")
                and core.comfy_ui_commit_datetime.date()
                < core.comfy_ui_required_commit_datetime.date()
            ):
                logging.warning(
                    f"\n\n## [WARN] ComfyUI-Manager: Your ComfyUI version ({core.comfy_ui_revision})[{core.comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version. ##\n\n"
                )
        except Exception:
            pass

        # process on_revision_detected -->
        if "cm.on_revision_detected_handler" in cm_global.variables:
            for k, f in cm_global.variables["cm.on_revision_detected_handler"]:
                try:
                    f(core.comfy_ui_revision)
                except Exception:
                    logging.error(f"[ERROR] '{k}' on_revision_detected_handler")
                    traceback.print_exc()

            del cm_global.variables["cm.on_revision_detected_handler"]
        else:
            logging.warning(
                "[ComfyUI-Manager] Some features are restricted due to your ComfyUI being outdated."
            )
        # <--

        if current_branch == "master":
            if comfyui_tag:
                logging.info(
                    f"### ComfyUI Version: {comfyui_tag} | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
            else:
                logging.info(
                    f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
        else:
            if comfyui_tag:
                logging.info(
                    f"### ComfyUI Version: {comfyui_tag} on '{current_branch}' | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
            else:
                logging.info(
                    f"### ComfyUI Revision: {core.comfy_ui_revision} on '{current_branch}' [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'"
                )
    except Exception:
        if is_detached:
            logging.info(
                f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] *DETACHED | Released on '{core.comfy_ui_commit_datetime.date()}'"
            )
        else:
            logging.info(
                "### ComfyUI Revision: UNKNOWN (The currently installed ComfyUI is not a Git repository)"
            )


def set_preview_method(method):
    if method == "auto":
        args.preview_method = latent_preview.LatentPreviewMethod.Auto
    elif method == "latent2rgb":
        args.preview_method = latent_preview.LatentPreviewMethod.Latent2RGB
    elif method == "taesd":
        args.preview_method = latent_preview.LatentPreviewMethod.TAESD
    else:
        args.preview_method = latent_preview.LatentPreviewMethod.NoPreviews

    core.get_config()["preview_method"] = method


def set_update_policy(mode):
    core.get_config()["update_policy"] = mode


def set_db_mode(mode):
    core.get_config()["db_mode"] = mode


def setup_environment():
    git_exe = core.get_config()["git_exe"]

    if git_exe != "":
        git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=git_exe)


def initialize_environment():
    context.comfy_path = os.path.dirname(folder_paths.__file__)
    core.js_path = os.path.join(context.comfy_path, "web", "extensions")

    # Legacy database paths - kept for potential future use
    # local_db_model = os.path.join(manager_util.comfyui_manager_path, "model-list.json")
    # local_db_alter = os.path.join(manager_util.comfyui_manager_path, "alter-list.json")
    # local_db_custom_node_list = os.path.join(
    #     manager_util.comfyui_manager_path, "custom-node-list.json"
    # )
    # local_db_extension_node_mappings = os.path.join(
    #     manager_util.comfyui_manager_path, "extension-node-map.json"
    # )

    set_preview_method(core.get_config()["preview_method"])
    print_comfyui_version()
    setup_environment()

    core.check_invalid_nodes()
@@ -1,60 +0,0 @@
import locale
import sys
import re


def handle_stream(stream, prefix):
    stream.reconfigure(encoding=locale.getpreferredencoding(), errors="replace")
    for msg in stream:
        if (
            prefix == "[!]"
            and ("it/s]" in msg or "s/it]" in msg)
            and ("%|" in msg or "it [" in msg)
        ):
            if msg.startswith("100%"):
                print("\r" + msg, end="", file=sys.stderr),
            else:
                print("\r" + msg[:-1], end="", file=sys.stderr),
        else:
            if prefix == "[!]":
                print(prefix, msg, end="", file=sys.stderr)
            else:
                print(prefix, msg, end="")


def convert_markdown_to_html(input_text):
    pattern_a = re.compile(r"\[a/([^]]+)]\(([^)]+)\)")
    pattern_w = re.compile(r"\[w/([^]]+)]")
    pattern_i = re.compile(r"\[i/([^]]+)]")
    pattern_bold = re.compile(r"\*\*([^*]+)\*\*")
    pattern_white = re.compile(r"%%([^*]+)%%")

    def replace_a(match):
        return f"<a href='{match.group(2)}' target='blank'>{match.group(1)}</a>"

    def replace_w(match):
        return f"<p class='cm-warn-note'>{match.group(1)}</p>"

    def replace_i(match):
        return f"<p class='cm-info-note'>{match.group(1)}</p>"

    def replace_bold(match):
        return f"<B>{match.group(1)}</B>"

    def replace_white(match):
        return f"<font color='white'>{match.group(1)}</font>"

    input_text = (
        input_text.replace("\\[", "[")
        .replace("\\]", "]")
        .replace("<", "&lt;")
        .replace(">", "&gt;")
    )

    result_text = re.sub(pattern_a, replace_a, input_text)
    result_text = re.sub(pattern_w, replace_w, result_text)
    result_text = re.sub(pattern_i, replace_i, result_text)
    result_text = re.sub(pattern_bold, replace_bold, result_text)
    result_text = re.sub(pattern_white, replace_white, result_text)

    return result_text.replace("\n", "<BR>")
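A small usage sketch of the note markup handled by convert_markdown_to_html above (the expected output is shown as a comment):

html = convert_markdown_to_html(
    "**Update required** [w/Restart ComfyUI after install] [a/docs](https://example.com)"
)
# -> "<B>Update required</B> <p class='cm-warn-note'>Restart ComfyUI after install</p> <a href='https://example.com' target='blank'>docs</a>"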
@@ -1,137 +0,0 @@
import os
import logging
import concurrent.futures
import folder_paths

from comfyui_manager.glob import manager_core as core
from comfyui_manager.glob.constants import model_dir_name_map, MODEL_DIR_NAMES


def get_model_dir(data, show_log=False):
    if "download_model_base" in folder_paths.folder_names_and_paths:
        models_base = folder_paths.folder_names_and_paths["download_model_base"][0][0]
    else:
        models_base = folder_paths.models_dir

    # NOTE: Validate to prevent path traversal.
    if any(char in data["filename"] for char in {"/", "\\", ":"}):
        return None

    if data["save_path"] != "default":
        if ".." in data["save_path"] or data["save_path"].startswith("/"):
            if show_log:
                logging.info(
                    f"[WARN] '{data['save_path']}' is not allowed path. So it will be saved into 'models/etc'."
                )
            base_model = os.path.join(models_base, "etc")
        else:
            if data["save_path"].startswith("custom_nodes"):
                logging.warning("The feature to download models into the custom node path is no longer supported.")
                return None
            else:
                base_model = os.path.join(models_base, data["save_path"])
    else:
        model_dir_name = model_dir_name_map.get(data["type"].lower())
        if model_dir_name is not None:
            base_model = folder_paths.folder_names_and_paths[model_dir_name][0][0]
        else:
            base_model = os.path.join(models_base, "etc")

    return base_model


def get_model_path(data, show_log=False):
    base_model = get_model_dir(data, show_log)
    if base_model is None:
        return None
    else:
        if data["filename"] == "<huggingface>":
            return os.path.join(base_model, os.path.basename(data["url"]))
        else:
            return os.path.join(base_model, data["filename"])


def check_model_installed(json_obj):
    def is_exists(model_dir_name, filename, url):
        if filename == "<huggingface>":
            filename = os.path.basename(url)

        dirs = folder_paths.get_folder_paths(model_dir_name)

        for x in dirs:
            if os.path.exists(os.path.join(x, filename)):
                return True

        return False

    total_models_files = set()
    for x in MODEL_DIR_NAMES:
        for y in folder_paths.get_filename_list(x):
            total_models_files.add(y)

    def process_model_phase(item):
        if (
            "diffusion" not in item["filename"]
            and "pytorch" not in item["filename"]
            and "model" not in item["filename"]
        ):
            # non-general name case
            if item["filename"] in total_models_files:
                item["installed"] = "True"
                return

        if item["save_path"] == "default":
            model_dir_name = model_dir_name_map.get(item["type"].lower())
            if model_dir_name is not None:
                item["installed"] = str(
                    is_exists(model_dir_name, item["filename"], item["url"])
                )
            else:
                item["installed"] = "False"
        else:
            model_dir_name = item["save_path"].split("/")[0]
            if model_dir_name in folder_paths.folder_names_and_paths:
                if is_exists(model_dir_name, item["filename"], item["url"]):
                    item["installed"] = "True"

            if "installed" not in item:
                if item["filename"] == "<huggingface>":
                    filename = os.path.basename(item["url"])
                else:
                    filename = item["filename"]

                fullpath = os.path.join(
                    folder_paths.models_dir, item["save_path"], filename
                )

                item["installed"] = "True" if os.path.exists(fullpath) else "False"

    with concurrent.futures.ThreadPoolExecutor(8) as executor:
        for item in json_obj["models"]:
            executor.submit(process_model_phase, item)


async def check_whitelist_for_model(item):
    from comfyui_manager.data_models import ManagerDatabaseSource

    json_obj = await core.get_data_by_mode(ManagerDatabaseSource.cache.value, "model-list.json")

    for x in json_obj.get("models", []):
        if (
            x["save_path"] == item["save_path"]
            and x["base"] == item["base"]
            and x["filename"] == item["filename"]
        ):
            return True

    json_obj = await core.get_data_by_mode(ManagerDatabaseSource.local.value, "model-list.json")

    for x in json_obj.get("models", []):
        if (
            x["save_path"] == item["save_path"]
            and x["base"] == item["base"]
            and x["filename"] == item["filename"]
        ):
            return True

    return False
@@ -1,40 +0,0 @@
from comfyui_manager.glob import manager_core as core
from comfy.cli_args import args
from comfyui_manager.data_models import SecurityLevel, RiskLevel


def is_loopback(address):
    import ipaddress
    try:
        return ipaddress.ip_address(address).is_loopback
    except ValueError:
        return False


def is_allowed_security_level(level):
    is_local_mode = is_loopback(args.listen)
    is_personal_cloud = core.get_config()['network_mode'].lower() == 'personal_cloud'

    if level == RiskLevel.block.value:
        return False
    elif level == RiskLevel.high_.value:
        if is_local_mode:
            return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
        elif is_personal_cloud:
            return core.get_config()['security_level'] == SecurityLevel.weak.value
        else:
            return False
    elif level == RiskLevel.high.value:
        if is_local_mode:
            return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal_.value]
        else:
            return core.get_config()['security_level'] == SecurityLevel.weak.value
    elif level == RiskLevel.middle_.value:
        if is_local_mode or is_personal_cloud:
            return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
        else:
            return False
    elif level == RiskLevel.middle.value:
        return core.get_config()['security_level'] in [SecurityLevel.weak.value, SecurityLevel.normal.value, SecurityLevel.normal_.value]
    else:
        return True
517
comfyui_manager/custom-node-list.json → custom-node-list.json
Normal file → Executable file
@@ -1030,6 +1030,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "image and latent quilting nodes for comfyui"
|
"description": "image and latent quilting nodes for comfyui"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "bmad4ever",
|
||||||
|
"title": "comfyui-panels",
|
||||||
|
"reference": "https://github.com/bmad4ever/comfyui_panels",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/bmad4ever/comfyui_panels"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Comics/Manga like panel layouts."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "FizzleDorf",
|
"author": "FizzleDorf",
|
||||||
"title": "FizzNodes",
|
"title": "FizzNodes",
|
||||||
@@ -4388,16 +4398,6 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": " If you want to draw two different characters together without blending their features, so you could try to check out this custom node."
|
"description": " If you want to draw two different characters together without blending their features, so you could try to check out this custom node."
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"author": "42lux",
|
|
||||||
"title": "ComfyUI-42lux",
|
|
||||||
"reference": "https://github.com/42lux/ComfyUI-42lux",
|
|
||||||
"files": [
|
|
||||||
"https://github.com/42lux/ComfyUI-42lux"
|
|
||||||
],
|
|
||||||
"install_type": "git-clone",
|
|
||||||
"description": "A collection of custom nodes for ComfyUI focused on enhanced sampling, model optimization, and quality improvements."
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"author": "sergekatzmann",
|
"author": "sergekatzmann",
|
||||||
"title": "ComfyUI_Nimbus-Pack",
|
"title": "ComfyUI_Nimbus-Pack",
|
||||||
@@ -8195,6 +8195,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A ComfyUI custom node for MiniCPM vision-language models, enabling high-quality image captioning and analysis."
|
"description": "A ComfyUI custom node for MiniCPM vision-language models, enabling high-quality image captioning and analysis."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "1038lab",
|
||||||
|
"title": "ComfyUI-FireRedTTS",
|
||||||
|
"reference": "https://github.com/1038lab/ComfyUI-FireRedTTS",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/1038lab/ComfyUI-FireRedTTS"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A ComfyUI integration for FireRedTTS‑2, a real-time multi-speaker TTS system enabling high-quality, emotionally expressive dialogue and monologue synthesis. Leveraging a streaming architecture and context-aware prosody modeling, it supports natural speaker turns and stable long-form generation, ideal for interactive chat and podcast applications."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "Klinter",
|
"author": "Klinter",
|
||||||
"title": "Klinter_nodes",
|
"title": "Klinter_nodes",
|
||||||
@@ -9152,17 +9162,6 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "You can use memeplex and DALL-E thru ComfyUI. You need API keys."
|
"description": "You can use memeplex and DALL-E thru ComfyUI. You need API keys."
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"author": "impactframes",
|
|
||||||
"title": "IF_AI_tools",
|
|
||||||
"id": "impactframes-tools",
|
|
||||||
"reference": "https://github.com/if-ai/ComfyUI-IF_AI_tools",
|
|
||||||
"files": [
|
|
||||||
"https://github.com/if-ai/ComfyUI-IF_AI_tools"
|
|
||||||
],
|
|
||||||
"install_type": "git-clone",
|
|
||||||
"description": "Various AI tools to use in Comfy UI. Starting with VL and prompt making tools using Ollma as backend will evolve as I find time."
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"author": "impactframes",
|
"author": "impactframes",
|
||||||
"title": "IF_AI_WishperSpeechNode",
|
"title": "IF_AI_WishperSpeechNode",
|
||||||
@@ -13197,7 +13196,17 @@
|
|||||||
"https://github.com/vanche1212/ComfyUI-ZMG-Nodes"
|
"https://github.com/vanche1212/ComfyUI-ZMG-Nodes"
|
||||||
],
|
],
|
||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "Nodes:ApiRequestNode, LoadVideoNode, JsonParserNode, OllamaRequestNode, OldPhotoColorizationNode."
|
"description": "Nodes:ApiRequestNode, LoadVideoNode, JsonParserNode, OllamaRequestNode, OldPhotoColorizationNode, waveform_2_audio, SaveImageUnifiedOutput, VideoHelperUnifiedOutput, ..."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "vanche1212",
|
||||||
|
"title": "ComfyUI InspireMusic Plugin",
|
||||||
|
"reference": "https://github.com/vanche1212/ComfyUI-InspireMusic",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/vanche1212/ComfyUI-InspireMusic"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "InspireMusic ComfyUI Plugin – ComfyUI Integration Plugin for AI Music Generation\nA ComfyUI node plugin based on Alibaba’s InspireMusic model, supporting text-to-music generation and music continuation features."
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"author": "hben35096",
|
"author": "hben35096",
|
||||||
@@ -14350,6 +14359,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A ComfyUI node to implement Together AI API image generation"
|
"description": "A ComfyUI node to implement Together AI API image generation"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "APZmedia",
|
||||||
|
"title": "ComfyUI APZmedia PSD Tools",
|
||||||
|
"reference": "https://github.com/APZmedia/APZmedia-ComfyUI-PSDtools",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/APZmedia/APZmedia-ComfyUI-PSDtools"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This extension provides PSD layer saving functionalities with mask support for ComfyUI."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "N3rd00d",
|
"author": "N3rd00d",
|
||||||
"title": "ComfyUI-Paint3D-Nodes",
|
"title": "ComfyUI-Paint3D-Nodes",
|
||||||
@@ -17405,6 +17424,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "Using IPAdapter for style consistency, the node accepts a story structured as text {prompt} text {prompt} etc. and generates a comic, saving it to /output. It also adds LLM API Request node, providing an openai compatible LLM API for generating the stories."
|
"description": "Using IPAdapter for style consistency, the node accepts a story structured as text {prompt} text {prompt} etc. and generates a comic, saving it to /output. It also adds LLM API Request node, providing an openai compatible LLM API for generating the stories."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "blob8",
|
||||||
|
"title": "ComfyUI_video-image-motion-transfer",
|
||||||
|
"reference": "https://github.com/blob8/ComfyUI_video-image-motion-transfer",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/blob8/ComfyUI_video-image-motion-transfer"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Introduces a node that tries to approximate the entire video using it's first frame (that we stylize) by warping it using optical flow extracted from the video. First we do image-to-image using the reference video's first frame and a depth controlnet. The generated object doesn't have to closely resemble the reference like on the demo. Then the generated image and the video frames are fed into the node and it returns a warped video. No ai models are used by the node. The example workflow uses SDXL but you can get it to work with any arch if you manage i2i."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "banqingyuan",
|
"author": "banqingyuan",
|
||||||
"title": "ComfyUI-text-replace",
|
"title": "ComfyUI-text-replace",
|
||||||
@@ -18731,7 +18760,7 @@
|
|||||||
"https://github.com/sneccc/comfyui-snek-nodes"
|
"https://github.com/sneccc/comfyui-snek-nodes"
|
||||||
],
|
],
|
||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "NODES:Aesthetics, Aesthetics V2, Load AI Toolkit Latent Flux, Send_to_Eagle"
|
"description": "NODES:Aesthetics, Aesthetics V2, Load AI Toolkit Latent Flux, Send_to_Eagle, Snek SQLite Prompt Logger"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"author": "theshubzworld",
|
"author": "theshubzworld",
|
||||||
@@ -23505,6 +23534,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "This custom node for ComfyUI allows you to scrape and download images and videos from the Midjourney showcase pages. It uses undetected_chromedriver to bypass anti-scraping measures, but requires session cookies from a logged-in browser session to function correctly."
|
"description": "This custom node for ComfyUI allows you to scrape and download images and videos from the Midjourney showcase pages. It uses undetected_chromedriver to bypass anti-scraping measures, but requires session cookies from a logged-in browser session to function correctly."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "lum3on",
|
||||||
|
"title": "ComfyUI Reve API Integration Node",
|
||||||
|
"reference": "https://github.com/lum3on/ComfyUI_Reve-API",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/lum3on/ComfyUI_Reve-API"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A unified ComfyUI custom node that integrates all Reve API endpoints (Create, Edit, Remix) into a single, dynamic node with operation-specific inputs and seamless operation switching."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "austinbrown34",
|
"author": "austinbrown34",
|
||||||
"title": "ComfyUI-IO-Helpers",
|
"title": "ComfyUI-IO-Helpers",
|
||||||
@@ -27242,6 +27281,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "VibeVoice TTS. Expressive, long-form, multi-speaker conversational audio"
|
"description": "VibeVoice TTS. Expressive, long-form, multi-speaker conversational audio"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "wildminder",
|
||||||
|
"title": "ComfyUI-VoxCPM",
|
||||||
|
"reference": "https://github.com/wildminder/ComfyUI-VoxCPM",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/wildminder/ComfyUI-VoxCPM"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "VoxCPM TTS. Context-aware, expressive speech generation and true-to-life voice cloning"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "cloudkoala",
|
"author": "cloudkoala",
|
||||||
"title": "comfyui-koala",
|
"title": "comfyui-koala",
|
||||||
@@ -27296,7 +27345,7 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"author": "hao-ai-lab",
|
"author": "hao-ai-lab",
|
||||||
"title": "ComfyUI-FastVideo",
|
"title": "FastVideo",
|
||||||
"reference": "https://github.com/hao-ai-lab/FastVideo",
|
"reference": "https://github.com/hao-ai-lab/FastVideo",
|
||||||
"files": [
|
"files": [
|
||||||
"https://github.com/hao-ai-lab/FastVideo"
|
"https://github.com/hao-ai-lab/FastVideo"
|
||||||
@@ -28085,16 +28134,6 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "This is a custom node for ComfyUI that retrieves detailed information about an image, including its name, format (extension), DPI, dimensions, long side, short side, file size, and EXIF data. It also supports image saving "
|
"description": "This is a custom node for ComfyUI that retrieves detailed information about an image, including its name, format (extension), DPI, dimensions, long side, short side, file size, and EXIF data. It also supports image saving "
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"author": "sm079",
|
|
||||||
"title": "ComfyUI-Face-Detection",
|
|
||||||
"reference": "https://github.com/sm079/ComfyUI-Face-Detection",
|
|
||||||
"files": [
|
|
||||||
"https://github.com/sm079/ComfyUI-Face-Detection"
|
|
||||||
],
|
|
||||||
"install_type": "git-clone",
|
|
||||||
"description": "face detection nodes for comfyui"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"author": "r-vage",
|
"author": "r-vage",
|
||||||
"title": "ComfyUI-RvTools_v2",
|
"title": "ComfyUI-RvTools_v2",
|
||||||
@@ -28466,6 +28505,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A comprehensive ComfyUI plugin that enables seamless integration with Substance 3D Designer workflows through command line automation. This plugin provides custom nodes for cooking .sbs files, rendering .sbsar archives, controlling material parameters, and batch processing Substance materials within ComfyUI workflows."
|
"description": "A comprehensive ComfyUI plugin that enables seamless integration with Substance 3D Designer workflows through command line automation. This plugin provides custom nodes for cooking .sbs files, rendering .sbsar archives, controlling material parameters, and batch processing Substance materials within ComfyUI workflows."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "jasonjgardner",
|
||||||
|
"title": "comfyui-pixel-art-scalers",
|
||||||
|
"reference": "https://github.com/jasonjgardner/comfyui-pixel-art-scalers",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/jasonjgardner/comfyui-pixel-art-scalers"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A ComfyUI node for scaling pixel art using various algorithms"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "sLKbabawhsiang",
|
"author": "sLKbabawhsiang",
|
||||||
"title": "ComfyUI-TuZi-Flux-Kontext",
|
"title": "ComfyUI-TuZi-Flux-Kontext",
|
||||||
@@ -29169,16 +29218,6 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "Smart, unified model loaders for ComfyUI that support both standard .safetensors and quantized .gguf formats — no switching nodes required. Includes flexible UNET and CLIP loaders that work across models like SDXL, SD3, Flux, and more."
|
"description": "Smart, unified model loaders for ComfyUI that support both standard .safetensors and quantized .gguf formats — no switching nodes required. Includes flexible UNET and CLIP loaders that work across models like SDXL, SD3, Flux, and more."
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"author": "lucak5s",
|
|
||||||
"title": "ComfyUI GFPGAN",
|
|
||||||
"reference": "https://github.com/lucak5s/comfyui_gfpgan",
|
|
||||||
"files": [
|
|
||||||
"https://github.com/lucak5s/comfyui_gfpgan"
|
|
||||||
],
|
|
||||||
"install_type": "git-clone",
|
|
||||||
"description": "Face restoration with GFPGAN."
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"author": "joeriben",
|
"author": "joeriben",
|
||||||
"title": "AI4ArtsEd Nodes",
|
"title": "AI4ArtsEd Nodes",
|
||||||
@@ -30707,16 +30746,6 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "This is a modified implementation of impact-pack's iterative upscaler. It leans in on the idea that giving too much attention to computation at high resolutions isn't a good idea."
|
"description": "This is a modified implementation of impact-pack's iterative upscaler. It leans in on the idea that giving too much attention to computation at high resolutions isn't a good idea."
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"author": "AlfredClark",
|
|
||||||
"title": "ComfyUI-ModelSpec",
|
|
||||||
"reference": "https://github.com/AlfredClark/ComfyUI-ModelSpec",
|
|
||||||
"files": [
|
|
||||||
"https://github.com/AlfredClark/ComfyUI-ModelSpec"
|
|
||||||
],
|
|
||||||
"install_type": "git-clone",
|
|
||||||
"description": "ComfyUI model metadata editing nodes."
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"author": "zl9739379",
|
"author": "zl9739379",
|
||||||
"title": "ComfyUI-ArkVideoGenerate",
|
"title": "ComfyUI-ArkVideoGenerate",
|
||||||
@@ -31335,13 +31364,13 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"author": "VraethrDalkr",
|
"author": "VraethrDalkr",
|
||||||
"title": "ComfyUI-ProgressiveBlend",
|
"title": "ComfyUI-TripleKSampler",
|
||||||
"reference": "https://github.com/VraethrDalkr/ComfyUI-ProgressiveBlend",
|
"reference": "https://github.com/VraethrDalkr/ComfyUI-TripleKSampler",
|
||||||
"files": [
|
"files": [
|
||||||
"https://github.com/VraethrDalkr/ComfyUI-ProgressiveBlend"
|
"https://github.com/VraethrDalkr/ComfyUI-TripleKSampler"
|
||||||
],
|
],
|
||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A collection of custom nodes for ComfyUI that enable progressive blending and color matching effects across image batches/video frames."
|
"description": "Triple-stage KSampler for Wan2.2 split models with Lightning LoRA"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"author": "NHLStenden",
|
"author": "NHLStenden",
|
||||||
@@ -31731,6 +31760,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "ComfyUI Civitai Gallery is a powerful custom node for ComfyUI that integrates a seamless image browser for the Civitai website directly into your workflow. This node allows you to browse, search, and select images from Civitai and instantly import their prompts, negative prompts, and full-resolution images into your workflow. It is designed to significantly speed up your creative process by eliminating the need to switch between your browser and ComfyUI."
|
"description": "ComfyUI Civitai Gallery is a powerful custom node for ComfyUI that integrates a seamless image browser for the Civitai website directly into your workflow. This node allows you to browse, search, and select images from Civitai and instantly import their prompts, negative prompts, and full-resolution images into your workflow. It is designed to significantly speed up your creative process by eliminating the need to switch between your browser and ComfyUI."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "Firetheft",
|
||||||
|
"title": "ComfyUI_Pixabay_Gallery",
|
||||||
|
"reference": "https://github.com/Firetheft/ComfyUI_Pixabay_Gallery",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Firetheft/ComfyUI_Pixabay_Gallery"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A powerful node for browsing and importing media from Pixabay directly within ComfyUI."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "RegulusAlpha",
|
"author": "RegulusAlpha",
|
||||||
"title": "ComfyUI Dynamic Prompting Simplified",
|
"title": "ComfyUI Dynamic Prompting Simplified",
|
||||||
@@ -31751,16 +31790,6 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A simple custom node to use reflect padding mode in the conv layers of VAEs."
|
"description": "A simple custom node to use reflect padding mode in the conv layers of VAEs."
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"author": "netroxin",
|
|
||||||
"title": "comfyui_netro",
|
|
||||||
"reference": "https://github.com/netroxin/comfyui_netro",
|
|
||||||
"files": [
|
|
||||||
"https://github.com/netroxin/comfyui_netro"
|
|
||||||
],
|
|
||||||
"install_type": "git-clone",
|
|
||||||
"description": "#Camera Movement Prompt Node for ComfyUI\nThis custom node script for ComfyUI generates descriptive camera movement prompts based on user-selected movement options for Wan2.2"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"author": "alexds9",
|
"author": "alexds9",
|
||||||
"title": "Save Checkpoint with Metadata",
|
"title": "Save Checkpoint with Metadata",
|
||||||
@@ -32438,11 +32467,11 @@
|
|||||||
"description": "save your image with customized naming rule"
|
"description": "save your image with customized naming rule"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"author": "leylahkrell",
|
"author": "leylahviolet",
|
||||||
"title": "ComfyUI Violet Tools",
|
"title": "ComfyUI Violet Tools",
|
||||||
"reference": "https://github.com/leylahkrell/ComfyUI-Violet-Tools",
|
"reference": "https://github.com/leylahviolet/ComfyUI-Violet-Tools",
|
||||||
"files": [
|
"files": [
|
||||||
"https://github.com/leylahkrell/ComfyUI-Violet-Tools"
|
"https://github.com/leylahviolet/ComfyUI-Violet-Tools"
|
||||||
],
|
],
|
||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A collection of aesthetic-focused custom nodes for ComfyUI that enhance AI image generation with sophisticated style and prompt management capabilities. Includes 7 nodes: Aesthetic Alchemist (style blending with 20+ curated aesthetics), Quality Queen (quality prompts), Glamour Goddess (hair/makeup), Body Bard (body features), Pose Priestess (positioning), Encoding Enchantress (text processing), and Negativity Nullifier (negative prompts). Features weighted blending, randomization, and modular YAML-based configuration."
|
"description": "A collection of aesthetic-focused custom nodes for ComfyUI that enhance AI image generation with sophisticated style and prompt management capabilities. Includes 7 nodes: Aesthetic Alchemist (style blending with 20+ curated aesthetics), Quality Queen (quality prompts), Glamour Goddess (hair/makeup), Body Bard (body features), Pose Priestess (positioning), Encoding Enchantress (text processing), and Negativity Nullifier (negative prompts). Features weighted blending, randomization, and modular YAML-based configuration."
|
||||||
@@ -32649,6 +32678,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A custom node for ComfyUI that provides seamless integration with the Wan models from Alibaba Cloud Model Studio. This solution delivers cutting-edge image and video generation capabilities directly within ComfyUI, supporting both international and Mainland China regions."
|
"description": "A custom node for ComfyUI that provides seamless integration with the Wan models from Alibaba Cloud Model Studio. This solution delivers cutting-edge image and video generation capabilities directly within ComfyUI, supporting both international and Mainland China regions."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "ru4ls",
|
||||||
|
"title": "ComfyUI_Imagen",
|
||||||
|
"reference": "https://github.com/ru4ls/ComfyUI_Imagen",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/ru4ls/ComfyUI_Imagen"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node for ComfyUI that leverages the Google Cloud Vertex AI Imagen API to generate and edit images."
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "garg-aayush",
|
"author": "garg-aayush",
|
||||||
"title": "ComfyUI-Svg2Raster",
|
"title": "ComfyUI-Svg2Raster",
|
||||||
@@ -32718,7 +32757,341 @@
|
|||||||
],
|
],
|
||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "Lightweight ComfyUI wrapper for IndexTTS 2 (voice cloning + emotion control)."
|
"description": "Lightweight ComfyUI wrapper for IndexTTS 2 (voice cloning + emotion control)."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "snicolast",
|
||||||
|
"title": "ComfyUI-Manufnode",
|
||||||
|
"reference": "https://github.com/efortin/ComfyUI-Ollama-Enhancer",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/efortin/ComfyUI-Ollama-Enhancer"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Custom ComfyUI nodes integrating Ollama to generate and enhance positive/negative prompts for Stable Diffusion workflows."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "fr0nky0ng",
|
||||||
|
"title": "ComfyUI-Face-Comparator",
|
||||||
|
"reference": "https://github.com/fr0nky0ng/ComfyUI-Face-Comparator",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/fr0nky0ng/ComfyUI-Face-Comparator"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is a node to detect the similarity between two faces"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "fr0nky0ng",
|
||||||
|
"title": "ComfyUI-frankAI-banana",
|
||||||
|
"reference": "https://github.com/fr0nky0ng/ComfyUI-frankAI-banana",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/fr0nky0ng/ComfyUI-frankAI-banana"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A set of ComfyUI nodes for calling the Nano-Banana API , while also supporting the Google API and frankAI API."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Tr1dae",
|
||||||
|
"title": "LoRA Matcher Nodes for ComfyUI",
|
||||||
|
"reference": "https://github.com/Tr1dae/ComfyUI-LoraPromptMatcher",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Tr1dae/ComfyUI-LoraPromptMatcher"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This custom node provides two different approaches to automatically match text prompts with LoRA models using their descriptions."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "nakagawadev",
|
||||||
|
"title": "comfyui_nakagawa",
|
||||||
|
"reference": "https://github.com/nakagawadev/comfyui_nakagawa",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/nakagawadev/comfyui_nakagawa"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A collection of custom nodes for ComfyUI that send video data through websockets instead of saving to disk."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "nakMe-guminagawadev",
|
||||||
|
"title": "MeComfyuiEncrypt",
|
||||||
|
"reference": "https://github.com/Me-gumin/MeComfyuiEncrypt",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Me-gumin/MeComfyuiEncrypt"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Image obfuscation in ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "dzy1128",
|
||||||
|
"title": "Seedream Image Generate ComfyUI Node",
|
||||||
|
"reference": "https://github.com/dzy1128/Seedream-Image-Generate-ComfyUI",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/dzy1128/Seedream-Image-Generate-ComfyUI"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A ComfyUI custom node based on the Volcano Engine Doubao large model Seedream API, designed for high-quality image generation."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "swfxliyiyu",
|
||||||
|
"title": "ComfyUI-FastVideo",
|
||||||
|
"reference": "https://github.com/swfxliyiyu/ComfyUI-FastVideo",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/swfxliyiyu/ComfyUI-FastVideo"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node suite for ComfyUI that provides accelerated video generation using [a/FastVideo](https://github.com/hao-ai-labs/FastVideo). See the blog post about FastVideo V1 to learn more."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "darkamenosa",
|
||||||
|
"title": "Enhanced Image Composite Masked",
|
||||||
|
"reference": "https://github.com/darkamenosa/comfy_inpaint_blend",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/darkamenosa/comfy_inpaint_blend"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Seamless inpainting for image-space models like Google Nano Banana and ByteDance Seedream 4. Fixes color mismatches using Poisson blending."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "len-ml",
|
||||||
|
"title": "comfyui_qwen_image_edit_adv",
|
||||||
|
"reference": "https://github.com/lenML/comfyui_qwen_image_edit_adv",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/lenML/comfyui_qwen_image_edit_adv"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Improved qwen image editing accuracy"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Ian2073",
|
||||||
|
"title": "ComfyUI-MyLLMNode",
|
||||||
|
"reference": "https://github.com/Ian2073/ComfyUI-MyLLMnode",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Ian2073/ComfyUI-MyLLMnode"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Custom ComfyUI node for running LLMs via HuggingFace pipeline. Supports both local paths and HuggingFace model names."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "semonxue",
|
||||||
|
"title": "ComfyUI FlexAI Nodes",
|
||||||
|
"reference": "https://github.com/Semonxue/Comfyui-flexai",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Semonxue/Comfyui-flexai"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Use the fewest nodes for the most flexible model calls. A versatile ComfyUI plugin for OpenAI-compatible APIs, featuring multi-purpose nodes for text and image, support for switching between multiple API providers, and auto-saving of custom models. Compatible with new models like nano-banana and seedream4."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Dehypnotic",
|
||||||
|
"title": "Save MP3",
|
||||||
|
"reference": "https://github.com/Dehypnotic/comfyui-save-mp3",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Dehypnotic/comfyui-save-mp3"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A node for saving audio in MP3-format with selected bitrate mode and quality to an output subfolder or absolute path on any drive."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Aaalice233",
|
||||||
|
"title": "ComfyUI-Danbooru-Gallery",
|
||||||
|
"reference": "https://github.com/Aaalice233/ComfyUI-Danbooru-Gallery",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Aaalice233/ComfyUI-Danbooru-Gallery"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A danbooru gallery for ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "l33chking",
|
||||||
|
"title": "Danbooru FAISS Search Nodes",
|
||||||
|
"id": "danbooru-faiss-search",
|
||||||
|
"reference": "https://github.com/L33chKing/ComfyUI-danbooru-FAISS-search",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/L33chKing/ComfyUI-danbooru-FAISS-search"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Use image to search similar images from danbooru using various methods. Notice: the optional API key will be saved to metadata if used"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "GegenDenTag",
|
||||||
|
"title": "Multi Area Conditioning",
|
||||||
|
"reference": "https://github.com/GegenDenTag/ComfyUI-multi-area-condition-node",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/GegenDenTag/ComfyUI-multi-area-condition-node"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Fix Nov. 2024, Davemane42's Custom Node for ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Kaleidia",
|
||||||
|
"title": "KaleidiaNodes",
|
||||||
|
"reference": "https://github.com/Kaleidia/KaleidiaNodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Kaleidia/KaleidiaNodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A simple set of nodes to make things easier. String Nodes and Files nodes."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "ialhabbal",
|
||||||
|
"title": "ComfyUI Prompt Verify",
|
||||||
|
"reference": "https://github.com/ialhabbal/ComfyUI-Prompt-Verify",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/ialhabbal/ComfyUI-Prompt-Verify"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Custom node to pause a string flow/prompt and let you edit the text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "xhh522",
|
||||||
|
"title": "ComfyUI Preview Monitor",
|
||||||
|
"reference": "https://github.com/xhh522/ComfyUI-preview-monitor",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/xhh522/ComfyUI-preview-monitor"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A powerful ComfyUI custom node for image preview and monitoring"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Junst",
|
||||||
|
"title": "ComfyUI-Concept-Diffusion",
|
||||||
|
"reference": "https://github.com/Junst/ComfyUI-Concept-Diffusion",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Junst/ComfyUI-Concept-Diffusion"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ConceptAttention: Diffusion Transformers Learn Highly Interpretable Features for ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "comrender",
|
||||||
|
"title": "ComfyUI-NanoSeed",
|
||||||
|
"reference": "https://github.com/comrender/ComfyUI-NanoSeed",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/comrender/ComfyUI-NanoSeed"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom ComfyUI node for seamless image editing using fal.ai's NanoBanana and Seedream (v4) models. Edit images with AI-powered prompts, supporting multi-image batches, custom resolutions, and easy fal.ai API key integration directly in the node interface. "
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "DecartAI",
|
||||||
|
"title": "Lucy-Edit-ComfyUI",
|
||||||
|
"reference": "https://github.com/DecartAI/Lucy-Edit-ComfyUI",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/DecartAI/Lucy-Edit-ComfyUI"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Lucy Edit is a video editing model that performs instruction-guided edits on videos using free-text prompts — it supports a variety of edits, such as clothing & accessory changes, character changes, object insertions, and scene replacements while preserving the motion and composition perfectly."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "wallen0322",
|
||||||
|
"title": "ComfyUI-QI-QwenEditSafe",
|
||||||
|
"reference": "https://github.com/wallen0322/ComfyUI-QI-QwenEditSafe",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/wallen0322/ComfyUI-QI-QwenEditSafe"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES: QI • TextEncodeQwenImageEdit, QI • VAE Decode, QI • RefEdit Encode — by wallen0322"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Yuxi Liu",
|
||||||
|
"title": "comfyui-ddu",
|
||||||
|
"reference": "https://github.com/YL-Lyx/Comfyui-ddu-toolchain",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/YL-Lyx/Comfyui-ddu-toolchain"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ai-driven toolchain for digital design and fabrication "
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Sean-Bradley",
|
||||||
|
"title": "ComfyUI Virtual Webcam",
|
||||||
|
"reference": "https://github.com/Sean-Bradley/ComfyUI-Virtual-Webcam",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Sean-Bradley/ComfyUI-Virtual-Webcam"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A Virtual Camera Output For ComfyUI. On Windows, it will use the OBS Virtual Camera driver. So make sure you have OBS installed. Then in your other webcam capable applications, such as Google Meet, Teams, Zoom and even OBS itself, you can connect to the OBS Virtual Camera option and see what you are outputting from ComfyUI."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "LukeCoulson1",
|
||||||
|
"title": "ComfyUI LoRA Combine Node",
|
||||||
|
"reference": "https://github.com/LukeCoulson1/Comfyui_LoraCombine",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/LukeCoulson1/Comfyui_LoraCombine"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom ComfyUI node that allows you to combine two LoRAs with adjustable strengths and multiple merging methods."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "ahkimkoo",
|
||||||
|
"title": "ComfyUI Audio Segment Plugin",
|
||||||
|
"reference": "https://github.com/ahkimkoo/Comfyui-AudioSegment",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/ahkimkoo/Comfyui-AudioSegment"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Custom node suite for ComfyUI designed for advanced audio processing"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "joyboy123456",
|
||||||
|
"title": "ComfyUI Google AI Studio Integration",
|
||||||
|
"reference": "https://github.com/joyboy123456/-ComfyUI-GoogleAIStudio",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/joyboy123456/-ComfyUI-GoogleAIStudio"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A comprehensive ComfyUI plugin that integrates Google AI Studio's powerful AI models for content generation, image creation, video planning, and advanced prompt optimization."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Noma-Machiko",
|
||||||
|
"title": "ComfyUI-ToDevice",
|
||||||
|
"reference": "https://github.com/Noma-Machiko/ComfyUI-ToDevice",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Noma-Machiko/ComfyUI-ToDevice"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A simple collection of device conversion nodes for ComfyUI. These nodes move an image node running on the GPU over to the CPU. They also support moving from CPU to GPU, but in that case, please remove the CPU option from the ComfyUI startup options."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "AdamShuo",
|
||||||
|
"title": "ComfyUI Switch Any Node",
|
||||||
|
"reference": "https://github.com/AdamShuo/ComfyUI_Switch",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/AdamShuo/ComfyUI_Switch"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is a custom node for ComfyUI that provides a dynamic 'Switch' for routing purposes. It allows you to define a list of named labels and select one, outputting the corresponding index and label name. This is useful for controlling the flow of your workflow based on a selection."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "mcrataobrabo",
|
||||||
|
"title": "comfyui-smart-lora-downloader - Automatically Fetch Missing LoRAs",
|
||||||
|
"reference": "https://github.com/mcrataobrabo/comfyui-smart-lora-downloader",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/mcrataobrabo/comfyui-smart-lora-downloader"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Automatically detect and download missing LoRAs for ComfyUI workflows"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "3dgopnik",
|
||||||
|
"title": "comfyui-smart-lora-downloader - Automatically Fetch Missing LoRAs",
|
||||||
|
"reference": "https://github.com/3dgopnik/comfyui-arena-suite",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/3dgopnik/comfyui-arena-suite"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI custom nodes in a single package: Arena legacy + AutoCache + Updater"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "SiggEye",
|
||||||
|
"title": "FaceCanon — Consistent Faces at Any Resolution",
|
||||||
|
"reference": "https://github.com/SiggEye/FaceCanon",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/SiggEye/FaceCanon"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "FaceCanon scales a detected face to a canonical pixel size, lets you run your favorite face detailer at that sweet spot, then maps the result back into the original image with seamless blending. The payoff is consistent face style no matter the input resolution or framing."
|
||||||
|
},
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,496 +0,0 @@
# Package Version Management Design

## Overview

ComfyUI Manager supports two package version types, each with distinct installation methods and version switching mechanisms:

1. **CNR Version (Archive)**: Production-ready releases with semantic versioning (e.g., v1.0.2), published to the CNR server, verified, and distributed as ZIP archives
2. **Nightly Version**: Real-time development builds from a Git repository without semantic versioning, providing direct access to the latest code via `git pull`

## Package ID Normalization

### Case Sensitivity Handling

**Source of Truth**: Package IDs originate from `pyproject.toml` with their original case (e.g., `ComfyUI_SigmoidOffsetScheduler`)

**Normalization Process**:
1. `cnr_utils.normalize_package_name()` provides centralized normalization (`cnr_utils.py:28-48`):
```python
def normalize_package_name(name: str) -> str:
    """
    Normalize package name for case-insensitive matching.
    - Strip leading/trailing whitespace
    - Convert to lowercase
    """
    return name.strip().lower()
```
2. `cnr_utils.read_cnr_info()` uses this normalization when indexing (`cnr_utils.py:314`):
```python
name = project.get('name').strip().lower()
```
3. The package is indexed in `installed_node_packages` with the lowercase ID: `'comfyui_sigmoidoffsetscheduler'`
4. **Critical**: All lookups (`is_enabled()`, `unified_disable()`) must use `cnr_utils.normalize_package_name()` for matching

**Implementation** (`manager_core.py:1374, 1389`):
```python
# Before checking if package is enabled or disabling
packname_normalized = cnr_utils.normalize_package_name(packname)
if self.is_enabled(packname_normalized):
    self.unified_disable(packname_normalized)
```

## Package Identification

### How Packages Are Identified

**Critical**: Packages MUST be identified by marker files and metadata, NOT by directory names.

**Identification Flow** (`manager_core.py:691-703`, `node_package.py:49-81`):

```python
def resolve_from_path(fullpath):
    """
    Identify package type and ID using markers and metadata files.

    Priority:
    1. Check for .git directory (Nightly)
    2. Check for .tracking + pyproject.toml (CNR)
    3. Unknown/legacy (fallback to directory name)
    """
    # 1. Nightly Detection
    url = git_utils.git_url(fullpath)  # Checks for .git/config
    if url:
        url = git_utils.compact_url(url)
        commit_hash = git_utils.get_commit_hash(fullpath)
        return {'id': url, 'ver': 'nightly', 'hash': commit_hash}

    # 2. CNR Detection
    info = cnr_utils.read_cnr_info(fullpath)  # Checks for .tracking + pyproject.toml
    if info:
        return {'id': info['id'], 'ver': info['version']}

    # 3. Unknown (fallback)
    return None
```

### Marker-Based Identification

**1. Nightly Packages**:
- **Marker**: `.git` directory presence
- **ID Extraction**: Read the URL from `.git/config` using `git_utils.git_url()` (`git_utils.py:34-53`)
- **ID Format**: Compact URL (e.g., `https://github.com/owner/repo` → compact form)
- **Why**: Git repositories are uniquely identified by their remote URL

**2. CNR Packages**:
- **Markers**: `.tracking` file AND `pyproject.toml` file (`.git` must NOT exist)
- **ID Extraction**: Read `name` from `pyproject.toml` using `cnr_utils.read_cnr_info()` (`cnr_utils.py:302-334`)
- **ID Format**: Normalized lowercase from `pyproject.toml` (e.g., `ComfyUI_Foo` → `comfyui_foo`)
- **Why**: CNR packages are identified by their canonical name in package metadata

**Implementation** (`cnr_utils.py:302-334`):
```python
def read_cnr_info(fullpath):
    toml_path = os.path.join(fullpath, 'pyproject.toml')
    tracking_path = os.path.join(fullpath, '.tracking')

    # MUST have both markers and NO .git directory
    if not os.path.exists(toml_path) or not os.path.exists(tracking_path):
        return None  # not a valid CNR node pack

    with open(toml_path, "r", encoding="utf-8") as f:
        data = toml.load(f)
    project = data.get('project', {})
    name = project.get('name').strip().lower()  # ← Normalized for indexing
    original_name = project.get('name')         # ← Original case preserved
    version = str(manager_util.StrictVersion(project.get('version')))

    return {
        "id": name,  # Normalized ID for lookups
        "original_name": original_name,
        "version": version,
        "url": repository
    }
```

### Why NOT Directory Names?

**Problem with directory-based identification**:
1. **Case Sensitivity Issues**: The same package can have different directory names
   - Active: `ComfyUI_Foo` (original case)
   - Disabled: `comfyui_foo@1_0_2` (lowercase)
2. **Version Suffix Confusion**: Disabled directories include the version in their name
3. **User Modifications**: Users can rename directories, breaking identification

**Correct Approach**:
- **Source of Truth**: Marker files (`.git`, `.tracking`, `pyproject.toml`)
- **Consistent IDs**: Based on metadata content, not filesystem names
- **Case Insensitive**: Normalized lookups work regardless of directory name

### Package Lookup Flow

**Index Building** (`manager_core.py:444-478`):
```python
def reload(self):
    self.installed_node_packages: dict[str, list[InstalledNodePackage]] = defaultdict(list)

    # Scan active packages
    for x in os.listdir(custom_nodes_path):
        fullpath = os.path.join(custom_nodes_path, x)
        if x not in ['__pycache__', '.disabled']:
            node_package = InstalledNodePackage.from_fullpath(fullpath, self.resolve_from_path)
            # ↓ Uses ID from resolve_from_path(), NOT directory name
            self.installed_node_packages[node_package.id].append(node_package)

    # Scan disabled packages
    for x in os.listdir(disabled_dir):
        fullpath = os.path.join(disabled_dir, x)
        node_package = InstalledNodePackage.from_fullpath(fullpath, self.resolve_from_path)
        # ↓ Same ID extraction, consistent indexing
        self.installed_node_packages[node_package.id].append(node_package)
```

**Lookup Process** (see the sketch below):
1. Normalize the search term: `cnr_utils.normalize_package_name(packname)`
2. Look up the normalized ID in the `installed_node_packages` dict
3. Match the found packages by version if needed
4. Return `InstalledNodePackage` objects with full metadata
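For illustration, a minimal sketch of this lookup path. The helper name `find_installed_packages` and the trimmed-down `InstalledNodePackage` fields are assumptions for this example, not the actual Manager API:

```python
from __future__ import annotations
from dataclasses import dataclass

def normalize_package_name(name: str) -> str:
    # Same normalization rule as cnr_utils.normalize_package_name()
    return name.strip().lower()

@dataclass
class InstalledNodePackage:
    id: str
    version: str
    fullpath: str

def find_installed_packages(index: dict[str, list[InstalledNodePackage]],
                            packname: str,
                            version: str | None = None) -> list[InstalledNodePackage]:
    """Look up packages by normalized ID, optionally filtering by version."""
    key = normalize_package_name(packname)
    candidates = index.get(key, [])
    if version is None:
        return candidates
    return [p for p in candidates if p.version == version]

# Usage: mixed-case queries resolve to the same indexed entry
index = {"comfyui_foo": [InstalledNodePackage("comfyui_foo", "1.0.2", "custom_nodes/ComfyUI_Foo")]}
assert find_installed_packages(index, "ComfyUI_Foo")[0].version == "1.0.2"
```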
### Edge Cases

**1. Package with `.git` AND `.tracking`**:
- **Detection**: Treated as Nightly (`.git` checked first)
- **Reason**: Git repo takes precedence over archive markers
- **Fix**: Remove the `.tracking` file to avoid confusion

**2. Missing Marker Files**:
- **CNR without `.tracking`**: Treated as Unknown
- **Nightly without `.git`**: Treated as Unknown, or CNR if it has `.tracking`
- **Recovery**: Re-install the package to restore the correct markers

**3. Corrupted `pyproject.toml`**:
- **Detection**: `read_cnr_info()` returns `None`
- **Result**: Package treated as Unknown
- **Recovery**: Manual fix or re-install

## Version Types

ComfyUI Manager supports two main package version types:

### 1. CNR Version (Comfy Node Registry - Versioned Releases)

**Also known as**: Archive version (because it is distributed as a ZIP archive)

**Purpose**: Production-ready releases that have been versioned, published to the CNR server, and verified before distribution

**Characteristics**:
- Semantic versioning assigned (e.g., v1.0.2, v2.1.0)
- Published to the CNR server with a verification process
- Stable, tested releases for production use
- Distributed as ZIP archives for reliability

**Installation Method**: ZIP file extraction from CNR (Comfy Node Registry)

**Identification**:
- Presence of a `.tracking` file in the package directory
- **Directory naming**:
  - **Active** (`custom_nodes/`): Uses `name` from `pyproject.toml` with original case (e.g., `ComfyUI_SigmoidOffsetScheduler`)
    - This is the `original_name` in the glob/ implementation
  - **Disabled** (`.disabled/`): Uses the `{package_name}@{version}` format (e.g., `comfyui_sigmoidoffsetscheduler@1_0_2`)
- Package indexed with the lowercase ID from `pyproject.toml`
- Versioned releases (e.g., v1.0.2, v2.1.0)

**`.tracking` File Purpose**:
- **Primary**: Marker that identifies this as a CNR/archive installation
- **Critical**: Contains the list of original files from the archive
- **Update Use Case**: When updating to a new version (see the sketch below):
  1. Read `.tracking` to identify the original archive files
  2. Delete ONLY the original archive files
  3. Preserve user-generated files (configs, models, custom code)
  4. Extract the new archive version
  5. Update `.tracking` with the new file list
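A minimal sketch of this tracking-based update, assuming a plain-text `.tracking` file with one relative path per line; the real Manager implementation differs in detail:

```python
import os
import zipfile

def read_tracking(pack_dir: str) -> list[str]:
    """Relative paths of files that came from the previously installed archive."""
    with open(os.path.join(pack_dir, ".tracking"), encoding="utf-8") as f:
        return [line.strip() for line in f if line.strip()]

def inplace_update(pack_dir: str, new_archive: str) -> None:
    # 1. Delete only the files listed in .tracking; user-created files survive
    for rel in read_tracking(pack_dir):
        path = os.path.join(pack_dir, rel)
        if os.path.isfile(path):
            os.remove(path)

    # 2. Extract the new archive into the same directory
    with zipfile.ZipFile(new_archive) as zf:
        zf.extractall(pack_dir)
        new_files = [n for n in zf.namelist() if not n.endswith("/")]

    # 3. Rewrite .tracking so the next update knows what to remove
    with open(os.path.join(pack_dir, ".tracking"), "w", encoding="utf-8") as f:
        f.write("\n".join(new_files) + "\n")
```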
**File Structure**:
```
custom_nodes/
  ComfyUI_SigmoidOffsetScheduler/
    .tracking          # List of original archive files
    pyproject.toml     # name = "ComfyUI_SigmoidOffsetScheduler"
    __init__.py
    nodes.py
    (user-created files preserved during update)
```

### 2. Nightly Version (Development Builds)

**Purpose**: Real-time development builds from a Git repository without semantic versioning

**Characteristics**:
- No semantic version assigned (version = "nightly")
- Direct access to the latest development code
- Real-time updates via `git pull`
- For testing, development, and early adoption
- Not verified through the CNR publication process

**Installation Method**: Git repository clone

**Identification**:
- Presence of a `.git` directory in the package directory
- `version: "nightly"` in package metadata
- **Directory naming**:
  - **Active** (`custom_nodes/`): Uses `name` from `pyproject.toml` with original case (e.g., `ComfyUI_SigmoidOffsetScheduler`)
    - This is the `original_name` in the glob/ implementation
  - **Disabled** (`.disabled/`): Uses the `{package_name}@nightly` format (e.g., `comfyui_sigmoidoffsetscheduler@nightly`)

**Update Mechanism**:
- `git pull` on the existing repository
- All user modifications in the git working tree are preserved by git

**File Structure**:
```
custom_nodes/
  ComfyUI_SigmoidOffsetScheduler/
    .git/              # Git repository marker
    pyproject.toml
    __init__.py
    nodes.py
    (git tracks all changes)
```

## Version Switching Mechanisms

### CNR ↔ Nightly (Uses `.disabled/` Directory)

**Mechanism**: Enable/disable toggling - only ONE version is active at a time

**Process**:
1. **CNR → Nightly**:
   ```
   Before: custom_nodes/ComfyUI_SigmoidOffsetScheduler/       (has .tracking)
   After:  custom_nodes/ComfyUI_SigmoidOffsetScheduler/       (has .git)
           .disabled/comfyui_sigmoidoffsetscheduler@1_0_2/    (has .tracking)
   ```
   - Move the archive directory to `.disabled/comfyui_sigmoidoffsetscheduler@{version}/`
   - Git clone nightly to `custom_nodes/ComfyUI_SigmoidOffsetScheduler/`

2. **Nightly → CNR**:
   ```
   Before: custom_nodes/ComfyUI_SigmoidOffsetScheduler/       (has .git)
           .disabled/comfyui_sigmoidoffsetscheduler@1_0_2/    (has .tracking)
   After:  custom_nodes/ComfyUI_SigmoidOffsetScheduler/       (has .tracking)
           .disabled/comfyui_sigmoidoffsetscheduler@nightly/  (has .git)
   ```
   - Move the nightly directory to `.disabled/comfyui_sigmoidoffsetscheduler@nightly/`
   - Restore the archive from `.disabled/comfyui_sigmoidoffsetscheduler@{version}/`

**Key Points** (see the sketch below):
- Both versions are preserved in the filesystem (one in `.disabled/`)
- Switching is fast (just move operations)
- No re-download is needed when switching back
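A rough sketch of the move-based toggle, assuming the disabled name is derived as `{normalized_id}@{version with dots replaced}`; error handling and reload calls are omitted:

```python
import os
import shutil

def disabled_name(package_id: str, version: str) -> str:
    # comfyui_foo + 1.0.2 -> comfyui_foo@1_0_2 ; "nightly" stays as-is
    return f"{package_id}@{version.replace('.', '_')}"

def switch_active(custom_nodes: str, original_name: str, package_id: str,
                  active_version: str, target_disabled_dir: str) -> None:
    """Park the active copy in .disabled/ and promote a previously disabled copy."""
    disabled_root = os.path.join(custom_nodes, ".disabled")
    os.makedirs(disabled_root, exist_ok=True)

    active_path = os.path.join(custom_nodes, original_name)
    park_path = os.path.join(disabled_root, disabled_name(package_id, active_version))

    shutil.move(active_path, park_path)            # disable the current version
    shutil.move(target_disabled_dir, active_path)  # enable the other version

# Example: swap an active CNR v1.0.2 with a parked nightly checkout
# switch_active("custom_nodes", "ComfyUI_Foo", "comfyui_foo", "1.0.2",
#               "custom_nodes/.disabled/comfyui_foo@nightly")
```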
### CNR Version Update (In-Place Update)

**Mechanism**: Direct directory content update - NO `.disabled/` directory used

**When**: Switching between different CNR versions (e.g., v1.0.1 → v1.0.2)

**Process**:
```
Before: custom_nodes/ComfyUI_SigmoidOffsetScheduler/  (v1.0.1, has .tracking)
After:  custom_nodes/ComfyUI_SigmoidOffsetScheduler/  (v1.0.2, has .tracking)
```

**Steps**:
1. Read `.tracking` to identify the original v1.0.1 files
2. Delete only the original v1.0.1 files (preserve user-created files)
3. Extract the v1.0.2 archive to the same directory
4. Update `.tracking` with the v1.0.2 file list
5. Update the `pyproject.toml` version metadata

**Critical**: Directory name and location remain unchanged

## API Design Decisions

### Enable/Disable Operations

**Design Decision**: ❌ **NO DIRECT ENABLE/DISABLE API PROVIDED**

**Rationale**:
- Enable/disable operations occur **ONLY as a by-product** of version switching
- Version switching is the primary operation that manages package state
- A direct enable/disable API would:
  1. Create ambiguity about which version to enable/disable
  2. Bypass version management logic
  3. Lead to inconsistent package state

**Implementation**:
- `unified_enable()` and `unified_disable()` are **internal methods only**
- Called exclusively from version switching operations:
  - `install_by_id()` (manager_core.py:1695-1724)
  - `cnr_switch_version_instant()` (manager_core.py:941)
  - `repo_update()` (manager_core.py:2144-2232)

**User Workflow**:
```
User wants to disable the CNR version and enable Nightly:
✅ Correct: install(package, version="nightly")
   → automatically disables CNR, enables Nightly
❌ Wrong: disable(package) + enable(package, "nightly")
   → not supported, ambiguous
```

**Testing Approach**:
- Enable/disable is tested **indirectly** through version switching tests
- Tests 1-12 validate enable/disable behavior via install/update operations
- No direct enable/disable API tests are needed (the API does not exist)

## Implementation Details

### Version Detection Logic

**Location**: `comfyui_manager/common/node_package.py`

```python
@dataclass
class InstalledNodePackage:
    @property
    def is_nightly(self) -> bool:
        return self.version == "nightly"

    @property
    def is_from_cnr(self) -> bool:
        return not self.is_unknown and not self.is_nightly
```

**Detection Order** (matching `resolve_from_path()` above):
1. Check for the `.git` directory → Nightly version
2. Check for the `.tracking` file → CNR (Archive) version
3. Otherwise → Unknown/legacy

### Reload Timing

**Critical**: `unified_manager.reload()` must be called:
1. **Before each queued task** (`manager_server.py:1245`):
   ```python
   # Reload installed packages before each task to ensure latest state
   core.unified_manager.reload()
   ```
2. **Before version switching** (`manager_core.py:1370`):
   ```python
   # Reload to ensure we have the latest package state before checking
   self.reload()
   ```

**Why**: Ensures the `installed_node_packages` dict reflects the actual filesystem state

### Disable Mechanism

**Implementation** (`manager_core.py:982-1017`, specifically line 1011):
```python
def unified_disable(self, packname: str):
    # ... validation logic ...

    # Generate disabled directory name with version suffix
    base_path = extract_base_custom_nodes_dir(matched_active.fullpath)
    folder_name = packname if not self.is_url_like(packname) else os.path.basename(matched_active.fullpath)
    to_path = os.path.join(base_path, '.disabled', f"{folder_name}@{matched_active.version.replace('.', '_')}")

    shutil.move(matched_active.fullpath, to_path)
```

**Naming Convention** (see the example below):
- `{folder_name}@{version}` format for ALL version types
- CNR v1.0.2 → `comfyui_foo@1_0_2` (dots replaced with underscores)
- Nightly → `comfyui_foo@nightly`
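Going the other way, a parsing sketch for recovering the id and version from a disabled directory name. This is illustrative only; the Manager identifies packages from marker files, not from this name:

```python
def parse_disabled_dir_name(dirname: str) -> tuple[str, str]:
    """Split 'comfyui_foo@1_0_2' into ('comfyui_foo', '1.0.2')."""
    package_id, _, suffix = dirname.rpartition("@")
    version = suffix if suffix == "nightly" else suffix.replace("_", ".")
    return package_id, version

assert parse_disabled_dir_name("comfyui_foo@1_0_2") == ("comfyui_foo", "1.0.2")
assert parse_disabled_dir_name("comfyui_foo@nightly") == ("comfyui_foo", "nightly")
```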
### Case Sensitivity Fix

**Problem**: Package IDs are normalized to lowercase during indexing but were not normalized during lookup

**Solution** (`manager_core.py:1372-1378, 1388-1393`):
```python
# Normalize packname using centralized cnr_utils function
# CNR packages are indexed with lowercase IDs from pyproject.toml
packname_normalized = cnr_utils.normalize_package_name(packname)

if self.is_enabled(packname_normalized):
    self.unified_disable(packname_normalized)
```

**Why a Centralized Function**:
- Consistent normalization across the entire codebase
- Single source of truth for package name normalization logic
- Easier to maintain and test
- Located in `cnr_utils.py:28-48`

## Directory Structure Examples

### Complete Example: All Version Types Coexisting

```
custom_nodes/
  ComfyUI_SigmoidOffsetScheduler/              # Active version (CNR v2.0.0 in this example)
    pyproject.toml                             # name = "ComfyUI_SigmoidOffsetScheduler"
    __init__.py
    nodes.py

  .disabled/                                   # Inactive versions storage
    comfyui_sigmoidoffsetscheduler@nightly/    # ← Nightly (disabled)
      .git/                                    # ← Nightly marker
      pyproject.toml
      __init__.py
      nodes.py

    comfyui_sigmoidoffsetscheduler@1_0_2/      # ← CNR v1.0.2 (disabled)
      .tracking                                # ← CNR marker with file list
      pyproject.toml
      __init__.py
      nodes.py

    comfyui_sigmoidoffsetscheduler@1_0_1/      # ← CNR v1.0.1 (disabled)
      .tracking
      pyproject.toml
      __init__.py
      nodes.py
```

**Key Points**:
- The active directory ALWAYS uses `original_name` without a version suffix
- Each disabled version has an `@{version}` suffix to avoid conflicts
- Multiple disabled versions can coexist (nightly + multiple CNR versions)

## Summary Table

| Version Type | Purpose | Marker | Active Directory Name | Disabled Directory Name | Update Method | Switch Mechanism |
|--------------|---------|--------|----------------------|------------------------|---------------|------------------|
| **CNR** (Archive) | Production-ready releases with semantic versioning, published to the CNR server and verified | `.tracking` file | `original_name` (e.g., `ComfyUI_Foo`) | `{package}@{version}` (e.g., `comfyui_foo@1_0_2`) | In-place update (preserve user files) | `.disabled/` toggle |
| **Nightly** | Real-time development builds from a Git repository without semantic versioning | `.git/` directory | `original_name` (e.g., `ComfyUI_Foo`) | `{package}@nightly` (e.g., `comfyui_foo@nightly`) | `git pull` | `.disabled/` toggle |

**Important Constraints**:
- **Active directory name**: MUST use `original_name` (from `pyproject.toml`) without a version suffix
  - Other code may depend on this specific directory name
  - Only ONE version can be active at a time
- **Disabled directory name**: MUST include the `@{version}` suffix so multiple disabled versions can coexist
  - CNR: `@{version}` (e.g., `@1_0_2`)
  - Nightly: `@nightly`

## Edge Cases

### 1. Multiple CNR Versions
- Each stored in `.disabled/` with a version suffix
- Only one can be active at a time
- Switching between CNR versions = direct content update (not via `.disabled/`)

### 2. Package ID Case Variations
- Always normalize to lowercase for internal lookups
- Preserve the original case in the filesystem/display
- Match against lowercase indexed keys

### 3. Corrupted `.tracking` File
- Treat as an unknown version type
- Warn the user before update/uninstall
- May require manual cleanup

### 4. Mixed CNR + Nightly in `.disabled/`
- Both can coexist in `.disabled/`
- Only one can be active in `custom_nodes/`
- Switch logic detects the type and handles it appropriately
@@ -1,235 +0,0 @@
# Security-Enhanced URL Installation System

## Overview

Security constraints have been added to the `install_by_url` function to control URL-based installations according to the system's security level.

## Security Level and Risk Level Framework

### Security Levels (SecurityLevel)
- **strong**: Most restrictive, only trusted sources allowed
- **normal**: Standard security, most known platforms allowed
- **normal-**: Relaxed security, additional allowances for personal cloud environments
- **weak**: Most permissive security, for local development environments

### Risk Levels (RiskLevel)
- **block**: Complete block (always denied)
- **high+**: Very high risk (only allowed in local mode + weak/normal-)
- **high**: High risk (only allowed in local mode + weak/normal-, or personal cloud + weak)
- **middle+**: Medium-high risk (weak/normal/normal- allowed in local/personal cloud)
- **middle**: Medium risk (weak/normal/normal- allowed in all environments)

## URL Risk Assessment Logic

### Low Risk (middle) - Trusted Platforms
```
- github.com
- gitlab.com
- bitbucket.org
- raw.githubusercontent.com
- gitlab.io
```

### High Risk (high+) - Suspicious/Local Hosting
```
- localhost, 127.0.0.1
- Private IP ranges: 192.168.*, 10.0.*, 172.*
- Temporary hosting: ngrok.io, herokuapp.com, repl.it, glitch.me
```

### Medium-High Risk (middle+) - Unknown Domains
```
- All domains not belonging to the above categories
```

### High Risk (high) - SSH Protocol
```
- URLs starting with ssh:// or git@
```
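A simplified sketch of this classification, assuming the categories above are matched against the hostname; the Manager's actual risk assessment is more involved:

```python
from urllib.parse import urlparse

TRUSTED_HOSTS = {"github.com", "gitlab.com", "bitbucket.org",
                 "raw.githubusercontent.com", "gitlab.io"}
SUSPICIOUS_HINTS = ("localhost", "127.0.0.1", "192.168.", "10.0.", "172.",
                    "ngrok.io", "herokuapp.com", "repl.it", "glitch.me")

def assess_url_risk(url: str) -> str:
    """Map a URL to one of the risk levels described above."""
    if url.startswith("ssh://") or url.startswith("git@"):
        return "high"        # SSH protocol
    host = urlparse(url).hostname or ""
    if any(hint in host for hint in SUSPICIOUS_HINTS):
        return "high+"       # local / temporary hosting
    if host in TRUSTED_HOSTS or host.endswith(".gitlab.io"):
        return "middle"      # trusted platform
    return "middle+"         # unknown domain

# Examples
assert assess_url_risk("https://github.com/user/repo") == "middle"
assert assess_url_risk("https://192.168.1.100/repo.git") == "high+"
```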
## Implemented Security Features

### 1. Security Validation (`_validate_url_security`)
```python
async def install_by_url(self, url: str, ...):
    # Security validation
    security_result = self._validate_url_security(url)
    if not security_result['allowed']:
        return self._report_failed_install_security(url, security_result['reason'], custom_name)
```

**Features**:
- Check the current security level
- Assess the URL risk
- Allow/block decision based on the security policy

### 2. Failure Reporting (`_report_failed_install_security`)
```python
def _report_failed_install_security(self, url: str, reason: str, custom_name=None):
    # Security block logging
    print(f"[SECURITY] Blocked URL installation: {url}")

    # Record failed installation
    self._record_failed_install_nodepack({
        'type': 'url-security-block',
        'url': url,
        'package_name': pack_name,
        'reason': reason,
        'security_level': current_security_level,
        'timestamp': timestamp
    })
```

**Features**:
- Log blocked installation attempts to the console
- Save failure information in a structured format
- Return the failure result as a ManagedResult

### 3. Failed Installation Record Management (`_record_failed_install_nodepack`)
```python
def get_failed_install_reports(self) -> list:
    return getattr(self, '_failed_installs', [])
```

**Features** (see the sketch below):
- Maintain the most recent 100 failure records
- Prevent memory overflow
- Provide an API for monitoring and debugging
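For illustration, a minimal capped-record sketch of how such a bounded failure log can be kept. The attribute name `_failed_installs` follows the accessor above; everything else is an assumption for this example:

```python
from collections import deque
from datetime import datetime, timezone

class FailedInstallLog:
    """Keeps only the most recent failure records to bound memory use."""

    def __init__(self, max_records: int = 100):
        self._failed_installs = deque(maxlen=max_records)

    def record(self, url: str, reason: str, security_level: str) -> None:
        self._failed_installs.append({
            'type': 'url-security-block',
            'url': url,
            'reason': reason,
            'security_level': security_level,
            'timestamp': datetime.now(timezone.utc).isoformat(),
        })

    def get_failed_install_reports(self) -> list:
        return list(self._failed_installs)
```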
## Usage Examples

### Behavior by Security Setting

#### Strong Security Level
```python
# Most URLs are blocked
result = await manager.install_by_url("https://github.com/user/repo")
# Result: Blocked (github is also middle risk, so it is blocked at the strong level)

result = await manager.install_by_url("https://suspicious-domain.com/repo.git")
# Result: Blocked (middle+ risk)
```

#### Normal Security Level
```python
# Trusted platforms allowed
result = await manager.install_by_url("https://github.com/user/repo")
# Result: Allowed

result = await manager.install_by_url("https://localhost/repo.git")
# Result: Blocked (high+ risk)
```

#### Weak Security Level (Local Development Environment)
```python
# Almost all URLs allowed
result = await manager.install_by_url("https://github.com/user/repo")
# Result: Allowed

result = await manager.install_by_url("https://192.168.1.100/repo.git")
# Result: Allowed (in local mode)

result = await manager.install_by_url("git@private-server.com:user/repo.git")
# Result: Allowed
```

### Failure Monitoring
```python
manager = UnifiedManager()

# Blocked installation attempt
await manager.install_by_url("https://malicious-site.com/evil-nodes.git")

# Check failure records
failed_reports = manager.get_failed_install_reports()
for report in failed_reports:
    print(f"Blocked: {report['url']} - {report['reason']}")
```

## Security Policy Matrix

| Risk Level | Strong | Normal | Normal- | Weak |
|------------|--------|--------|---------|------|
| **block**  | ❌ | ❌ | ❌ | ❌ |
| **high+**  | ❌ | ❌ | 🔒* | 🔒* |
| **high**   | ❌ | ❌ | 🔒*/☁️** | ✅ |
| **middle+**| ❌ | ❌ | 🔒*/☁️** | ✅ |
| **middle** | ❌ | ✅ | ✅ | ✅ |

- 🔒* : Allowed only in local mode
- ☁️** : Allowed only in personal cloud mode
- ✅ : Allowed
- ❌ : Blocked
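A sketch of a policy check that encodes this matrix as data; the function name and its signature are assumptions for illustration, not the Manager's API:

```python
# Allowed (security_level, network_mode) combinations per risk level,
# transcribed from the matrix above. "any" means any network mode.
POLICY = {
    "block":   set(),
    "high+":   {("normal-", "local"), ("weak", "local")},
    "high":    {("normal-", "local"), ("normal-", "personal_cloud"), ("weak", "any")},
    "middle+": {("normal-", "local"), ("normal-", "personal_cloud"), ("weak", "any")},
    "middle":  {("normal", "any"), ("normal-", "any"), ("weak", "any")},
}

def is_url_install_allowed(risk: str, security_level: str, network_mode: str) -> bool:
    allowed = POLICY.get(risk, set())
    return (security_level, network_mode) in allowed or (security_level, "any") in allowed

# Examples matching the matrix
assert not is_url_install_allowed("middle", "strong", "local")
assert is_url_install_allowed("middle", "normal", "private")
assert is_url_install_allowed("high+", "weak", "local")
assert not is_url_install_allowed("high+", "weak", "personal_cloud")
```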
## Error Message Examples

### Security Block
```
Installation blocked by security policy: URL installation blocked by security level: strong (risk: middle)
Target: awesome-nodes@url-blocked
```

### Console Log
```
[SECURITY] Blocked URL installation: https://suspicious-domain.com/repo.git
[SECURITY] Reason: URL installation blocked by security level: normal (risk: middle+)
[SECURITY] Package: repo
```

## Configuration Recommendations

### Production Environment
```json
{
  "security_level": "strong",
  "network_mode": "private"
}
```
- Most restrictive settings
- Only trusted sources allowed

### Development Environment
```json
{
  "security_level": "weak",
  "network_mode": "local"
}
```
- Permissive settings for development convenience
- Allow local repositories and development servers

### Personal Cloud Environment
```json
{
  "security_level": "normal-",
  "network_mode": "personal_cloud"
}
```
- Balanced settings for personal use
- Allow personal repository access

## Security Enhancement Benefits

### 1. Malware Prevention
- Automatic blocking from unknown sources
- Filter suspicious domains and IPs

### 2. Network Security
- Control private network access
- Restrict SSH protocol usage

### 3. Audit Trail
- Record all blocked attempts
- Log security events

### 4. Flexible Policy
- Customized security levels per environment
- Distinguish between production/development environments

## Backward Compatibility

- The existing `install_by_id` function is unchanged
- No security validation is applied to CNR-based installations
- `install_by_id_or_url` applies security validation only to URLs

This security enhancement significantly improves system security while maintaining the convenience of URL-based installations.
@@ -1,355 +0,0 @@
|
|||||||
# CNR Version Management Design
|
|
||||||
|
|
||||||
**Version**: 1.1
|
|
||||||
**Date**: 2025-11-08
|
|
||||||
**Status**: Official Design Policy
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
This document describes the official design policy for CNR (ComfyUI Node Registry) version management in ComfyUI Manager.
|
|
||||||
|
|
||||||
## Core Design Principles
|
|
||||||
|
|
||||||
### 1. In-Place Upgrade Policy
|
|
||||||
|
|
||||||
**Policy**: CNR upgrades are performed as **in-place replacements** without version history preservation.
|
|
||||||
|
|
||||||
**Rationale**:
|
|
||||||
- **Simplicity**: Single version management is easier for users and maintainers
|
|
||||||
- **Disk Space**: Prevents accumulation of old package versions
|
|
||||||
- **Clear State**: Users always know which version is active
|
|
||||||
- **Consistency**: Same behavior for enabled and disabled states
|
|
||||||
|
|
||||||
**Behavior**:
|
|
||||||
```
|
|
||||||
Before: custom_nodes/PackageName/ (CNR v1.0.1 with .tracking)
|
|
||||||
Action: Install CNR v1.0.2
|
|
||||||
After: custom_nodes/PackageName/ (CNR v1.0.2 with .tracking)
|
|
||||||
Result: Old v1.0.1 REMOVED (not preserved)
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Single CNR Version Policy
|
|
||||||
|
|
||||||
**Policy**: Only **ONE CNR version** exists at any given time (either enabled OR disabled, never both).
|
|
||||||
|
|
||||||
**Rationale**:
|
|
||||||
- **State Clarity**: No ambiguity about which CNR version is current
|
|
||||||
- **Resource Management**: Minimal disk usage
|
|
||||||
- **User Experience**: Clear version state without confusion
|
|
||||||
- **Design Consistency**: Uniform handling across operations
|
|
||||||
|
|
||||||
**States**:
|
|
||||||
- **Enabled**: `custom_nodes/PackageName/` (with `.tracking`)
|
|
||||||
- **Disabled**: `.disabled/packagename@version/` (with `.tracking`)
|
|
||||||
- **Never**: Multiple CNR versions coexisting
|
|
||||||
|
|
||||||
### 3. CNR vs Nightly Differentiation
|
|
||||||
|
|
||||||
**Policy**: Different handling for CNR and Nightly packages based on use cases.
|
|
||||||
|
|
||||||
| Aspect | CNR Packages (`.tracking`) | Nightly Packages (`.git`) |
|
|
||||||
|--------|----------------------------|---------------------------|
|
|
||||||
| **Purpose** | Stable releases | Development versions |
|
|
||||||
| **Preservation** | Not preserved (in-place upgrade) | Preserved (multiple versions) |
|
|
||||||
| **Version Policy** | Single version only | Multiple versions allowed |
|
|
||||||
| **Use Case** | Production use | Testing and development |
|
|
||||||
|
|
||||||
**Rationale**:
|
|
||||||
- **CNR**: Stable releases don't need version history; users want single stable version
|
|
||||||
- **Nightly**: Development versions benefit from multiple versions for testing
|
|
||||||
|
|
||||||
### 4. API Response Priority Rules
|
|
||||||
|
|
||||||
**Policy**: The `/v2/customnode/installed` API applies two priority rules to prevent duplicate package entries and ensure clear state representation.
|
|
||||||
|
|
||||||
**Rule 1 (Enabled-Priority)**:
|
|
||||||
- **Policy**: When both enabled and disabled versions of the same package exist → Return ONLY the enabled version
|
|
||||||
- **Rationale**: Prevents frontend confusion from duplicate package entries
|
|
||||||
- **Implementation**: `comfyui_manager/glob/manager_core.py:1801` in `get_installed_nodepacks()`
|
|
||||||
|
|
||||||
**Rule 2 (CNR-Priority for Disabled Packages)**:
|
|
||||||
- **Policy**: When both CNR and Nightly versions are disabled → Return ONLY the CNR version
|
|
||||||
- **Rationale**: CNR versions are stable releases and should be preferred over development Nightly builds when both are inactive
|
|
||||||
- **Implementation**: `comfyui_manager/glob/manager_core.py:1801` in `get_installed_nodepacks()`
|
|
||||||
|
|
||||||
**Priority Matrix**:
|
|
||||||
|
|
||||||
| Scenario | Enabled Versions | Disabled Versions | API Response |
|
|
||||||
|----------|------------------|-------------------|--------------|
|
|
||||||
| 1. CNR enabled only | CNR v1.0.1 | None | CNR v1.0.1 (`enabled: true`) |
|
|
||||||
| 2. CNR enabled + Nightly disabled | CNR v1.0.1 | Nightly | **Only CNR v1.0.1** (`enabled: true`) ← Rule 1 |
|
|
||||||
| 3. Nightly enabled + CNR disabled | Nightly | CNR v1.0.1 | **Only Nightly** (`enabled: true`) ← Rule 1 |
|
|
||||||
| 4. CNR disabled + Nightly disabled | None | CNR v1.0.1, Nightly | **Only CNR v1.0.1** (`enabled: false`) ← Rule 2 |
|
|
||||||
| 5. Different packages disabled | None | PackageA, PackageB | Both packages (`enabled: false`) |
|
|
||||||
|
|
||||||
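A simplified sketch of how Rules 1 and 2 could be applied per package when building the response (illustrative only; the actual logic lives in `get_installed_nodepacks()`):

```python
def select_reported_pack(packs):
    """Apply Rule 1, then Rule 2, to the installed variants of one package.

    `packs` is assumed to be a list of objects exposing `is_enabled` and
    `is_from_cnr`, one entry per installed variant of the same package id.
    """
    enabled = [p for p in packs if p.is_enabled]
    if enabled:
        # Rule 1: an enabled version always wins over disabled ones
        return enabled[0]
    disabled_cnr = [p for p in packs if p.is_from_cnr]
    if disabled_cnr:
        # Rule 2: among disabled versions, prefer the stable CNR release
        return disabled_cnr[0]
    return packs[0] if packs else None
```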
**Test Coverage**:
|
|
||||||
- `tests/glob/test_installed_api_enabled_priority.py`
|
|
||||||
- `test_installed_api_shows_only_enabled_when_both_exist` - Verifies Rule 1
|
|
||||||
- `test_installed_api_cnr_priority_when_both_disabled` - Verifies Rule 2
|
|
||||||
|
|
||||||
## Detailed Behavior Specifications
|
|
||||||
|
|
||||||
### CNR Upgrade (Enabled → Enabled)
|
|
||||||
|
|
||||||
**Scenario**: Upgrading from CNR v1.0.1 to v1.0.2 when v1.0.1 is enabled
|
|
||||||
|
|
||||||
```
|
|
||||||
Initial State:
|
|
||||||
custom_nodes/PackageName/ (CNR v1.0.1 with .tracking)
|
|
||||||
|
|
||||||
Action:
|
|
||||||
Install CNR v1.0.2
|
|
||||||
|
|
||||||
Process:
|
|
||||||
1. Download CNR v1.0.2
|
|
||||||
2. Remove existing custom_nodes/PackageName/
|
|
||||||
3. Install CNR v1.0.2 to custom_nodes/PackageName/
|
|
||||||
4. Create .tracking file
|
|
||||||
|
|
||||||
Final State:
|
|
||||||
custom_nodes/PackageName/ (CNR v1.0.2 with .tracking)
|
|
||||||
|
|
||||||
Result:
|
|
||||||
✓ v1.0.2 installed and enabled
|
|
||||||
✓ v1.0.1 completely removed
|
|
||||||
✓ No version history preserved
|
|
||||||
```
|
|
||||||
|
|
||||||
### CNR Switch from Disabled
|
|
||||||
|
|
||||||
**Scenario**: Switching from disabled CNR v1.0.1 to CNR v1.0.2
|
|
||||||
|
|
||||||
```
|
|
||||||
Initial State:
|
|
||||||
custom_nodes/PackageName/ (Nightly with .git)
|
|
||||||
.disabled/packagename@1_0_1/ (CNR v1.0.1 with .tracking)
|
|
||||||
|
|
||||||
User Action:
|
|
||||||
Install CNR v1.0.2
|
|
||||||
|
|
||||||
Process:
|
|
||||||
Step 1: Enable disabled CNR v1.0.1
|
|
||||||
- Move .disabled/packagename@1_0_1/ → custom_nodes/PackageName/
|
|
||||||
- Move custom_nodes/PackageName/ → .disabled/packagename@nightly/
|
|
||||||
|
|
||||||
Step 2: Upgrade CNR v1.0.1 → v1.0.2 (in-place)
|
|
||||||
- Download CNR v1.0.2
|
|
||||||
- Remove custom_nodes/PackageName/
|
|
||||||
- Install CNR v1.0.2 to custom_nodes/PackageName/
|
|
||||||
|
|
||||||
Final State:
|
|
||||||
custom_nodes/PackageName/ (CNR v1.0.2 with .tracking)
|
|
||||||
.disabled/packagename@nightly/ (Nightly preserved)
|
|
||||||
|
|
||||||
Result:
|
|
||||||
✓ CNR v1.0.2 installed and enabled
|
|
||||||
✓ CNR v1.0.1 removed (not preserved in .disabled/)
|
|
||||||
✓ Nightly preserved in .disabled/
|
|
||||||
```
|
|
||||||
|
|
||||||
### CNR Disable
|
|
||||||
|
|
||||||
**Scenario**: Disabling CNR v1.0.1 when Nightly exists
|
|
||||||
|
|
||||||
```
|
|
||||||
Initial State:
|
|
||||||
custom_nodes/PackageName/ (CNR v1.0.1 with .tracking)
|
|
||||||
|
|
||||||
Action:
|
|
||||||
Disable CNR v1.0.1
|
|
||||||
|
|
||||||
Final State:
|
|
||||||
.disabled/packagename@1_0_1/ (CNR v1.0.1 with .tracking)
|
|
||||||
|
|
||||||
Note:
|
|
||||||
- Only ONE disabled CNR version exists
|
|
||||||
- If another CNR is already disabled, it is replaced
|
|
||||||
```
|
|
||||||
|
|
||||||
### Nightly Installation (with CNR Disabled)
|
|
||||||
|
|
||||||
**Scenario**: Installing Nightly when CNR v1.0.1 is disabled
|
|
||||||
|
|
||||||
```
|
|
||||||
Initial State:
|
|
||||||
.disabled/packagename@1_0_1/ (CNR v1.0.1 with .tracking)
|
|
||||||
|
|
||||||
Action:
|
|
||||||
Install Nightly
|
|
||||||
|
|
||||||
Final State:
|
|
||||||
custom_nodes/PackageName/ (Nightly with .git)
|
|
||||||
.disabled/packagename@1_0_1/ (CNR v1.0.1 preserved)
|
|
||||||
|
|
||||||
Result:
|
|
||||||
✓ Nightly installed and enabled
|
|
||||||
✓ Disabled CNR v1.0.1 preserved (not removed)
|
|
||||||
✓ Different handling for Nightly vs CNR
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Requirements
|
|
||||||
|
|
||||||
### CNR Install/Upgrade Operation
|
|
||||||
|
|
||||||
1. **Check for existing CNR versions**:
|
|
||||||
- Enabled: `custom_nodes/PackageName/` with `.tracking`
|
|
||||||
- Disabled: `.disabled/*` with `.tracking`
|
|
||||||
|
|
||||||
2. **Remove old CNR versions**:
|
|
||||||
- If enabled CNR exists: Remove it
|
|
||||||
- If disabled CNR exists: Remove it
|
|
||||||
- Ensure only ONE CNR version will exist after operation
|
|
||||||
|
|
||||||
3. **Install new CNR version**:
|
|
||||||
- Download and extract to target location
|
|
||||||
- Create `.tracking` file
|
|
||||||
- Register in package database
|
|
||||||
|
|
||||||
4. **Preserve Nightly packages**:
|
|
||||||
- Do NOT remove packages with `.git` directory
|
|
||||||
- Nightly packages should be preserved in `.disabled/`
|
|
||||||
|
|
||||||
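The steps above can be sketched roughly as follows, assuming `.disabled/` sits alongside the package directories and `download_new_version` stands in for the actual download/extract step (both are assumptions, not Manager APIs):

```python
import os
import shutil

CUSTOM_NODES = "custom_nodes"
DISABLED_DIR = os.path.join(CUSTOM_NODES, ".disabled")

def upgrade_cnr_in_place(pack_name: str, download_new_version) -> None:
    enabled_dir = os.path.join(CUSTOM_NODES, pack_name)

    # Steps 1-2: remove any existing CNR copy (enabled or disabled); Nightly (.git) copies are left alone
    candidates = [enabled_dir]
    if os.path.isdir(DISABLED_DIR):
        candidates += [
            os.path.join(DISABLED_DIR, d)
            for d in os.listdir(DISABLED_DIR)
            if d.lower().startswith(pack_name.lower() + "@")
        ]
    for path in candidates:
        if os.path.isdir(path) and os.path.exists(os.path.join(path, ".tracking")):
            shutil.rmtree(path)  # enforce the single CNR version policy

    # Step 3: install the new CNR version and mark it with .tracking
    download_new_version(enabled_dir)  # assumed callback that extracts the new archive
    open(os.path.join(enabled_dir, ".tracking"), "w").close()
```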
### CNR Disable Operation
|
|
||||||
|
|
||||||
1. **Move enabled CNR to disabled**:
|
|
||||||
- Move `custom_nodes/PackageName/` → `.disabled/packagename@version/`
|
|
||||||
- Use **installed version** for directory name (not registry latest)
|
|
||||||
|
|
||||||
2. **Remove any existing disabled CNR**:
|
|
||||||
- Only ONE disabled CNR version allowed
|
|
||||||
- If another CNR is already in `.disabled/`, remove it first
|
|
||||||
|
|
||||||
3. **Preserve disabled Nightly**:
|
|
||||||
- Do NOT remove disabled Nightly packages
|
|
||||||
- Multiple Nightly versions can coexist in `.disabled/`
|
|
||||||
|
|
||||||
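A sketch of the disable steps, again assuming the `.disabled/` layout used in the examples above and the `name@1_0_1` directory naming convention (dots replaced with underscores):

```python
import os
import shutil

def disable_cnr(pack_name: str, installed_version: str) -> None:
    enabled_dir = os.path.join("custom_nodes", pack_name)
    disabled_root = os.path.join("custom_nodes", ".disabled")
    os.makedirs(disabled_root, exist_ok=True)

    # Only one disabled CNR version may exist: drop any previously disabled CNR copy first
    for d in os.listdir(disabled_root):
        path = os.path.join(disabled_root, d)
        if d.lower().startswith(pack_name.lower() + "@") and \
                os.path.exists(os.path.join(path, ".tracking")):
            shutil.rmtree(path)

    # Name the directory after the *installed* version, not the registry latest
    target = f"{pack_name.lower()}@{installed_version.replace('.', '_')}"
    shutil.move(enabled_dir, os.path.join(disabled_root, target))
```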
### CNR Enable Operation
|
|
||||||
|
|
||||||
1. **Check for enabled package**:
|
|
||||||
- If another package is enabled, disable it first
|
|
||||||
|
|
||||||
2. **Move disabled CNR to enabled**:
|
|
||||||
- Move `.disabled/packagename@version/` → `custom_nodes/PackageName/`
|
|
||||||
|
|
||||||
3. **Maintain single CNR policy**:
|
|
||||||
- After enable, no CNR should remain in `.disabled/`
|
|
||||||
- Only Nightly packages should remain in `.disabled/`
|
|
||||||
|
|
||||||
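The single CNR policy can also be expressed as a post-condition check after an enable operation, which is handy in tests (same layout assumptions as the sketches above):

```python
import os

def assert_single_cnr_after_enable(pack_name: str) -> None:
    disabled_root = os.path.join("custom_nodes", ".disabled")
    leftover = []
    if os.path.isdir(disabled_root):
        for d in os.listdir(disabled_root):
            path = os.path.join(disabled_root, d)
            # Any leftover entry for this package must be a Nightly (.git), never a CNR (.tracking)
            if d.lower().startswith(pack_name.lower() + "@") and \
                    os.path.exists(os.path.join(path, ".tracking")):
                leftover.append(d)
    assert not leftover, f"single CNR version policy violated: {leftover}"
```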
## Test Coverage
|
|
||||||
|
|
||||||
### Phase 7: Version Management Behavior Tests
|
|
||||||
|
|
||||||
**Test 7.1: `test_cnr_version_upgrade_removes_old`**
|
|
||||||
- ✅ Verifies in-place upgrade removes old CNR version
|
|
||||||
- ✅ Confirms only one CNR version exists after upgrade
|
|
||||||
- ✅ Documents single version policy
|
|
||||||
|
|
||||||
**Test 7.2: `test_cnr_nightly_switching_preserves_nightly_only`**
|
|
||||||
- ✅ Verifies Nightly preservation across CNR upgrades
|
|
||||||
- ✅ Confirms old CNR versions removed (not preserved)
|
|
||||||
- ✅ Documents different handling for CNR vs Nightly
|
|
||||||
|
|
||||||
### Other Relevant Tests
|
|
||||||
|
|
||||||
**Phase 1-6 Tests**:
|
|
||||||
- ✅ All tests comply with single CNR version policy
|
|
||||||
- ✅ No tests assume multiple CNR versions coexist
|
|
||||||
- ✅ Fixtures properly handle CNR vs Nightly differences
|
|
||||||
|
|
||||||
## Known Behaviors
|
|
||||||
|
|
||||||
### Correct Behaviors (By Design)
|
|
||||||
|
|
||||||
1. **CNR Upgrades Remove Old Versions**
|
|
||||||
- Status: ✅ Intentional design
|
|
||||||
- Reason: In-place upgrade policy
|
|
||||||
- Test: Phase 7.1 verifies this
|
|
||||||
|
|
||||||
2. **Only One CNR Version Exists**
|
|
||||||
- Status: ✅ Intentional design
|
|
||||||
- Reason: Single version policy
|
|
||||||
- Test: Phase 7.2 verifies this
|
|
||||||
|
|
||||||
3. **Nightly Preserved, CNR Not**
|
|
||||||
- Status: ✅ Intentional design
|
|
||||||
- Reason: Different use cases
|
|
||||||
- Test: Phase 7.2 verifies this
|
|
||||||
|
|
||||||
### Known Issues
|
|
||||||
|
|
||||||
1. **Disable API Version Mismatch**
|
|
||||||
- Status: ⚠️ Bug to be fixed
|
|
||||||
- Issue: Disabled directory name uses registry latest instead of installed version
|
|
||||||
- Impact: Incorrect directory naming
|
|
||||||
- Priority: Medium
|
|
||||||
|
|
||||||
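Example: if CNR v1.0.1 is installed but the registry's latest release is v1.0.3, disabling the package must produce `.disabled/packagename@1_0_1/`; the buggy behavior instead names the directory `packagename@1_0_3`, a version that was never installed.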
## Design Rationale
|
|
||||||
|
|
||||||
### Why In-Place Upgrade?
|
|
||||||
|
|
||||||
**Benefits**:
|
|
||||||
- Simple mental model for users
|
|
||||||
- No disk space accumulation
|
|
||||||
- Clear version state
|
|
||||||
- Easier maintenance
|
|
||||||
|
|
||||||
**Trade-offs**:
|
|
||||||
- No automatic rollback capability
|
|
||||||
- Users must reinstall old versions from registry
|
|
||||||
- Network required for version downgrades
|
|
||||||
|
|
||||||
**Decision**: Benefits outweigh trade-offs for stable release management.
|
|
||||||
|
|
||||||
### Why Different CNR vs Nightly Handling?
|
|
||||||
|
|
||||||
**CNR (Stable Releases)**:
|
|
||||||
- Users want single stable version
|
|
||||||
- Production use case
|
|
||||||
- Rollback via registry if needed
|
|
||||||
|
|
||||||
**Nightly (Development Builds)**:
|
|
||||||
- Developers test multiple versions
|
|
||||||
- Development use case
|
|
||||||
- Local version testing important
|
|
||||||
|
|
||||||
**Decision**: Different use cases justify different policies.
|
|
||||||
|
|
||||||
## Future Considerations
|
|
||||||
|
|
||||||
### Potential Enhancements (Not Currently Planned)
|
|
||||||
|
|
||||||
1. **Optional Version History**
|
|
||||||
- Configurable preservation of last N versions
|
|
||||||
- Opt-in via configuration flag
|
|
||||||
- Separate history directory
|
|
||||||
|
|
||||||
2. **CNR Rollback API**
|
|
||||||
- Dedicated rollback endpoint
|
|
||||||
- Re-download from registry
|
|
||||||
- Preserve current version before downgrade
|
|
||||||
|
|
||||||
3. **Version Pinning**
|
|
||||||
- Pin specific CNR version
|
|
||||||
- Prevent automatic upgrades
|
|
||||||
- Per-package configuration
|
|
||||||
|
|
||||||
**Note**: These are potential future enhancements, not current requirements.
|
|
||||||
|
|
||||||
## Version History
|
|
||||||
|
|
||||||
| Version | Date | Changes |
|
|
||||||
|---------|------|---------|
|
|
||||||
| 1.1 | 2025-11-08 | Added API Response Priority Rules (Rule 1: Enabled-Priority, Rule 2: CNR-Priority) |
|
|
||||||
| 1.0 | 2025-11-06 | Initial design document based on user clarification |
|
|
||||||
|
|
||||||
## References
|
|
||||||
|
|
||||||
- Phase 7 Test Implementation: `tests/glob/test_complex_scenarios.py`
|
|
||||||
- Policy Clarification: `.claude/livecontext/cnr_version_policy_clarification.md`
|
|
||||||
- Bug Report: `.claude/livecontext/bugs_to_file.md`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Approved By**: User feedback 2025-11-06
|
|
||||||
**Status**: Official Policy
|
|
||||||
**Compliance**: All tests verified against this policy
|
|
||||||
@@ -1,292 +0,0 @@
|
|||||||
# Glob Module API Reference for CLI Migration
|
|
||||||
|
|
||||||
## 🎯 Quick Reference
|
|
||||||
This document provides essential glob module APIs available for CLI implementation. **READ ONLY** - do not modify glob module.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📦 Core Classes
|
|
||||||
|
|
||||||
### UnifiedManager
|
|
||||||
**Location**: `comfyui_manager/glob/manager_core.py:436`
|
|
||||||
**Instance**: Available as `unified_manager` (global instance)
|
|
||||||
|
|
||||||
#### Data Structures
|
|
||||||
```python
class UnifiedManager:
    def __init__(self):
        # PRIMARY DATA - Use these instead of legacy dicts
        self.installed_node_packages: dict[str, list[InstalledNodePackage]]
        self.repo_nodepack_map: dict[str, InstalledNodePackage]  # compact_url -> package
        self.processed_install: set
```
|
|
||||||
|
|
||||||
#### Core Methods (Direct CLI Equivalents)
|
|
||||||
```python
|
|
||||||
# Installation & Management
|
|
||||||
async def install_by_id(packname: str, version_spec=None, channel=None,
|
|
||||||
mode=None, instant_execution=False, no_deps=False,
|
|
||||||
return_postinstall=False) -> ManagedResult
|
|
||||||
def unified_enable(packname: str, version_spec=None) -> ManagedResult
|
|
||||||
def unified_disable(packname: str) -> ManagedResult
|
|
||||||
def unified_uninstall(packname: str) -> ManagedResult
|
|
||||||
def unified_update(packname: str, instant_execution=False, no_deps=False,
|
|
||||||
return_postinstall=False) -> ManagedResult
|
|
||||||
def unified_fix(packname: str, version_spec, instant_execution=False,
|
|
||||||
no_deps=False) -> ManagedResult
|
|
||||||
|
|
||||||
# Package Resolution & Info
|
|
||||||
def resolve_node_spec(packname: str, guess_mode=None) -> tuple[str, str, bool] | None
|
|
||||||
def get_active_pack(packname: str) -> InstalledNodePackage | None
|
|
||||||
def get_inactive_pack(packname: str, version_spec=None) -> InstalledNodePackage | None
|
|
||||||
|
|
||||||
# Git Repository Operations
|
|
||||||
async def repo_install(url: str, repo_path: str, instant_execution=False,
|
|
||||||
no_deps=False, return_postinstall=False) -> ManagedResult
|
|
||||||
def repo_update(repo_path: str, instant_execution=False, no_deps=False,
|
|
||||||
return_postinstall=False) -> ManagedResult
|
|
||||||
|
|
||||||
# Utilities
|
|
||||||
def is_url_like(url: str) -> bool
|
|
||||||
def reload() -> None
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### InstalledNodePackage
|
|
||||||
**Location**: `comfyui_manager/common/node_package.py:10`
|
|
||||||
|
|
||||||
```python
@dataclass
class InstalledNodePackage:
    # Core Data
    id: str                 # Package identifier
    fullpath: str           # Installation path
    disabled: bool          # Disabled state
    version: str            # Version (cnr version, "nightly", or "unknown")
    repo_url: str = None    # Git repository URL (for nightly/unknown)

    # Computed Properties
    @property
    def is_unknown(self) -> bool: ...      # version == "unknown"
    @property
    def is_nightly(self) -> bool: ...      # version == "nightly"
    @property
    def is_from_cnr(self) -> bool: ...     # not unknown and not nightly
    @property
    def is_enabled(self) -> bool: ...      # not disabled
    @property
    def is_disabled(self) -> bool: ...     # disabled

    # Methods
    def get_commit_hash(self) -> str: ...
    def isValid(self) -> bool: ...

    @staticmethod
    def from_fullpath(fullpath: str, resolve_from_path) -> "InstalledNodePackage": ...
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### ManagedResult
|
|
||||||
**Location**: `comfyui_manager/glob/manager_core.py:285`
|
|
||||||
|
|
||||||
```python
|
|
||||||
class ManagedResult:
|
|
||||||
def __init__(self, action: str):
|
|
||||||
self.action: str = action # 'install-cnr', 'install-git', 'enable', 'skip', etc.
|
|
||||||
self.result: bool = True # Success/failure
|
|
||||||
self.msg: str = "" # Human readable message
|
|
||||||
self.target: str = None # Target identifier
|
|
||||||
self.postinstall = None # Post-install callback
|
|
||||||
|
|
||||||
# Methods
|
|
||||||
def fail(self, msg: str = "") -> ManagedResult
|
|
||||||
def with_msg(self, msg: str) -> ManagedResult
|
|
||||||
def with_target(self, target: str) -> ManagedResult
|
|
||||||
def with_postinstall(self, postinstall) -> ManagedResult
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🛠️ Standalone Functions
|
|
||||||
|
|
||||||
### Core Manager Functions
|
|
||||||
```python
|
|
||||||
# Snapshot Operations
|
|
||||||
async def save_snapshot_with_postfix(postfix: str, path: str = None,
|
|
||||||
custom_nodes_only: bool = False) -> str
|
|
||||||
|
|
||||||
async def restore_snapshot(snapshot_path: str, git_helper_extras=None) -> None
|
|
||||||
|
|
||||||
# Node Utilities
|
|
||||||
def simple_check_custom_node(url: str) -> str # Returns: 'installed', 'not-installed', 'disabled'
|
|
||||||
|
|
||||||
# Path Utilities
|
|
||||||
def get_custom_nodes_paths() -> list[str]
|
|
||||||
```
|
|
||||||
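A possible CLI-side use of the snapshot helpers above (both are async per the signatures, so they are wrapped with `asyncio.run()`; the postfix value is illustrative):

```python
import asyncio

from comfyui_manager.glob import manager_core as core

# Save a snapshot of custom nodes only; the helper returns the snapshot path
snapshot_path = asyncio.run(
    core.save_snapshot_with_postfix("cli-backup", custom_nodes_only=True)
)

# Later, roll the environment back to that snapshot
asyncio.run(core.restore_snapshot(snapshot_path))
```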
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔗 CNR Utilities
|
|
||||||
**Location**: `comfyui_manager/common/cnr_utils.py`
|
|
||||||
|
|
||||||
```python
|
|
||||||
# Essential CNR functions for CLI
|
|
||||||
def get_nodepack(packname: str) -> dict | None
|
|
||||||
# Returns CNR package info or None
|
|
||||||
|
|
||||||
def get_all_nodepackages() -> dict[str, dict]
|
|
||||||
# Returns all CNR packages {package_id: package_info}
|
|
||||||
|
|
||||||
def all_versions_of_node(node_name: str) -> list[dict] | None
|
|
||||||
# Returns version history for a package
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📋 Usage Patterns for CLI Migration
|
|
||||||
|
|
||||||
### 1. Replace Legacy Dict Access
|
|
||||||
```python
# ❌ OLD (Legacy way)
for k, v in unified_manager.active_nodes.items():
    version, fullpath = v
    print(f"Active: {k} @ {version}")

# ✅ NEW (Glob way)
for packages in unified_manager.installed_node_packages.values():
    for pack in packages:
        if pack.is_enabled:
            print(f"Active: {pack.id} @ {pack.version}")
```
|
|
||||||
|
|
||||||
### 2. Package Installation
|
|
||||||
```python
# CNR Package Installation
res = await unified_manager.install_by_id("package-name", "1.0.0",
                                           instant_execution=True, no_deps=False)

# Git URL Installation
if unified_manager.is_url_like(url):
    repo_name = os.path.basename(url).replace('.git', '')
    res = await unified_manager.repo_install(url, repo_name,
                                             instant_execution=True, no_deps=False)
```
|
|
||||||
|
|
||||||
### 3. Package State Queries
|
|
||||||
```python
# Check if package is active
active_pack = unified_manager.get_active_pack("package-name")
if active_pack:
    print(f"Package is enabled: {active_pack.version}")

# Check if package is inactive
inactive_pack = unified_manager.get_inactive_pack("package-name")
if inactive_pack:
    print(f"Package is disabled: {inactive_pack.version}")
```
|
|
||||||
|
|
||||||
### 4. CNR Data Access
|
|
||||||
```python
# Get CNR package information
from ..common import cnr_utils

cnr_info = cnr_utils.get_nodepack("package-name")
if cnr_info:
    publisher = cnr_info.get('publisher', {}).get('name', 'Unknown')
    print(f"Publisher: {publisher}")

# Get all CNR packages (for show not-installed)
all_cnr = cnr_utils.get_all_nodepackages()
```
|
|
||||||
|
|
||||||
### 5. Result Handling
|
|
||||||
```python
res = await unified_manager.install_by_id("package-name")

if res.action == 'skip':
    print(f"SKIP: {res.msg}")
elif res.action == 'install-cnr' and res.result:
    print(f"INSTALLED: {res.target}")
elif res.action == 'enable' and res.result:
    print("ENABLED: package was already installed")
else:
    print(f"ERROR: {res.msg}")
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🚫 NOT Available in Glob (Handle These)
|
|
||||||
|
|
||||||
### Legacy Functions That Don't Exist:
|
|
||||||
- `get_custom_nodes()` → Use `cnr_utils.get_all_nodepackages()`
|
|
||||||
- `load_nightly()` → Remove or stub
|
|
||||||
- `extract_nodes_from_workflow()` → Remove feature
|
|
||||||
- `gitclone_install()` → Use `repo_install()`
|
|
||||||
|
|
||||||
### Legacy Properties That Don't Exist:
|
|
||||||
- `active_nodes` → Use `installed_node_packages` + filter by `is_enabled`
|
|
||||||
- `cnr_map` → Use `cnr_utils.get_all_nodepackages()`
|
|
||||||
- `cnr_inactive_nodes` → Use `installed_node_packages` + filter by `is_disabled` and `is_from_cnr`
|
|
||||||
- `nightly_inactive_nodes` → Use `installed_node_packages` + filter by `is_disabled` and `is_nightly`
|
|
||||||
- `unknown_active_nodes` → Use `installed_node_packages` + filter by `is_enabled` and `is_unknown`
|
|
||||||
- `unknown_inactive_nodes` → Use `installed_node_packages` + filter by `is_disabled` and `is_unknown`
|
|
||||||
|
|
||||||
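As a concrete example of the mappings above, a legacy-style view such as `cnr_inactive_nodes` can be reconstructed on the CLI side (the helper name is ours, not a glob API):

```python
def cnr_inactive_view(unified_manager) -> dict[str, str]:
    """Rebuild a legacy-style {package_id: fullpath} map of disabled CNR packages."""
    view = {}
    for packages in unified_manager.installed_node_packages.values():
        for pack in packages:
            if pack.is_disabled and pack.is_from_cnr:
                view[pack.id] = pack.fullpath
    return view
```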
---
|
|
||||||
|
|
||||||
## 🔄 Data Migration Examples
|
|
||||||
|
|
||||||
### Show Enabled Packages
|
|
||||||
```python
def show_enabled_packages():
    enabled_packages = []

    # Collect enabled packages
    for packages in unified_manager.installed_node_packages.values():
        for pack in packages:
            if pack.is_enabled:
                enabled_packages.append(pack)

    # Display with CNR info
    for pack in enabled_packages:
        if pack.is_from_cnr:
            cnr_info = cnr_utils.get_nodepack(pack.id)
            publisher = cnr_info.get('publisher', {}).get('name', 'Unknown') if cnr_info else 'Unknown'
            print(f"[ ENABLED ] {pack.id:50} (author: {publisher}) [{pack.version}]")
        elif pack.is_nightly:
            print(f"[ ENABLED ] {pack.id:50} (nightly) [NIGHTLY]")
        else:
            print(f"[ ENABLED ] {pack.id:50} (unknown) [UNKNOWN]")
```
|
|
||||||
|
|
||||||
### Show Not-Installed Packages
|
|
||||||
```python
def show_not_installed_packages():
    # Get installed package IDs
    installed_ids = set()
    for packages in unified_manager.installed_node_packages.values():
        for pack in packages:
            installed_ids.add(pack.id)

    # Get all CNR packages
    all_cnr = cnr_utils.get_all_nodepackages()

    # Show not-installed
    for pack_id, pack_info in all_cnr.items():
        if pack_id not in installed_ids:
            publisher = pack_info.get('publisher', {}).get('name', 'Unknown')
            latest_version = pack_info.get('latest_version', {}).get('version', '0.0.0')
            print(f"[ NOT INSTALLED ] {pack_info['name']:50} {pack_id:30} (author: {publisher}) [{latest_version}]")
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## ⚠️ Key Constraints
|
|
||||||
|
|
||||||
1. **NO MODIFICATIONS**: Do not add any functions or properties to glob module
|
|
||||||
2. **USE EXISTING APIs**: Only use the functions and classes documented above
|
|
||||||
3. **ADAPT CLI**: CLI must adapt to glob's data structures and patterns
|
|
||||||
4. **REMOVE IF NEEDED**: Remove features that can't be implemented with available APIs
|
|
||||||
|
|
||||||
This reference should provide everything needed to implement the CLI migration using only existing glob APIs.
|
|
||||||
@@ -1,324 +0,0 @@
|
|||||||
# CLI Glob Migration - Implementation Todo List
|
|
||||||
|
|
||||||
## 📅 Project Timeline: 3.5 Days
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 🚀 Phase 1: Initial Setup & Import Changes (0.5 day)
|
|
||||||
|
|
||||||
## Day 1 Morning
|
|
||||||
|
|
||||||
### ✅ Setup and Preparation (30 min)
|
|
||||||
- [ ] Read implementation context file
|
|
||||||
- [ ] Review glob APIs documentation
|
|
||||||
- [ ] Set up development environment
|
|
||||||
- [ ] Create backup of current CLI
|
|
||||||
|
|
||||||
### 🔄 Import Path Changes (1 hour)
|
|
||||||
- [ ] **CRITICAL**: Update import statements in `cm_cli/__main__.py:39-41`
|
|
||||||
```python
|
|
||||||
# Change from:
|
|
||||||
from ..legacy import manager_core as core
|
|
||||||
from ..legacy.manager_core import unified_manager
|
|
||||||
|
|
||||||
# Change to:
|
|
||||||
from ..glob import manager_core as core
|
|
||||||
from ..glob.manager_core import unified_manager
|
|
||||||
```
|
|
||||||
- [ ] Test CLI loads without crashing
|
|
||||||
- [ ] Identify immediate import-related errors
|
|
||||||
|
|
||||||
### 🧪 Initial Testing (30 min)
|
|
||||||
- [ ] Test basic CLI help: `python -m comfyui_manager.cm_cli help`
|
|
||||||
- [ ] Test simple commands that should work: `python -m comfyui_manager.cm_cli show snapshot`
|
|
||||||
- [ ] Document all errors found
|
|
||||||
- [ ] Prioritize fixes needed
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# ⚙️ Phase 2: Core Function Implementation (2 days)
|
|
||||||
|
|
||||||
## Day 1 Afternoon + Day 2
|
|
||||||
|
|
||||||
### 🛠️ install_node() Function Update (3 hours)
|
|
||||||
**File**: `cm_cli/__main__.py:187-235`
|
|
||||||
**Complexity**: Medium
|
|
||||||
|
|
||||||
#### Tasks:
|
|
||||||
- [ ] **Replace Git URL handling logic**
|
|
||||||
```python
# OLD (line ~191):
if core.is_valid_url(node_spec_str):
    res = asyncio.run(core.gitclone_install(node_spec_str, no_deps=cmd_ctx.no_deps))

# NEW:
if unified_manager.is_url_like(node_spec_str):
    repo_name = os.path.basename(node_spec_str)
    if repo_name.endswith('.git'):
        repo_name = repo_name[:-4]
    res = asyncio.run(unified_manager.repo_install(
        node_spec_str, repo_name, instant_execution=True, no_deps=cmd_ctx.no_deps
    ))
```
|
|
||||||
- [ ] Test Git URL installation
|
|
||||||
- [ ] Test CNR package installation
|
|
||||||
- [ ] Verify error handling works correctly
|
|
||||||
- [ ] Update progress messages if needed
|
|
||||||
|
|
||||||
### 🔍 show_list() Function Rewrite - Installed-Only Approach (3 hours)
|
|
||||||
**File**: `cm_cli/__main__.py:418-534`
|
|
||||||
**Complexity**: High - Complete architectural change
|
|
||||||
**New Approach**: Show only installed nodepacks with on-demand info retrieval
|
|
||||||
|
|
||||||
#### Key Changes:
|
|
||||||
- ❌ Remove: Full cache loading (`get_custom_nodes()`)
|
|
||||||
- ❌ Remove: Support for `show all`, `show not-installed`, `show cnr`
|
|
||||||
- ✅ Add: Lightweight caching system for nodepack metadata
|
|
||||||
- ✅ Add: On-demand CNR API calls for additional info
|
|
||||||
|
|
||||||
#### Tasks:
|
|
||||||
- [ ] **Phase 2A: Lightweight Cache Implementation (1 hour)**
|
|
||||||
```python
class NodePackageCache:
    def __init__(self, cache_file_path: str):
        self.cache_file = cache_file_path
        self.cache_data = self._load_cache()  # planned helper: read persisted metadata, or {} if none

    def get_metadata(self, nodepack_id: str) -> dict:
        # Get cached metadata or fetch on-demand from CNR
        ...

    def update_metadata(self, nodepack_id: str, metadata: dict):
        # Update cache (called during install)
        ...
```
|
|
||||||
|
|
||||||
- [ ] **Phase 2B: New show_list Implementation (1.5 hours)**
|
|
||||||
```python
def show_list(kind, simple=False):
    # Validate supported commands
    if kind not in ['installed', 'enabled', 'disabled']:
        print(f"Unsupported: 'show {kind}'. Use: installed/enabled/disabled")
        return

    # Get installed packages only
    all_packages = []
    for packages in unified_manager.installed_node_packages.values():
        all_packages.extend(packages)

    # Filter by status
    if kind == 'enabled':
        packages = [pkg for pkg in all_packages if pkg.is_enabled]
    elif kind == 'disabled':
        packages = [pkg for pkg in all_packages if not pkg.is_enabled]
    else:  # 'installed'
        packages = all_packages
```
|
|
||||||
|
|
||||||
- [ ] **Phase 2C: On-Demand Display with Cache (0.5 hour)**
|
|
||||||
```python
cache = NodePackageCache(cache_file_path)

for package in packages:
    # Basic info from InstalledNodePackage
    status = "[ ENABLED ]" if package.is_enabled else "[ DISABLED ]"

    # Enhanced info from cache or on-demand
    cached_info = cache.get_metadata(package.id)
    name = cached_info.get('name', package.id)
    author = cached_info.get('author', 'Unknown')
    version = cached_info.get('version', 'Unknown')

    if simple:
        print(f"{name}@{version}")
    else:
        print(f"{status} {name:50} {package.id:30} (author: {author:20}) [{version}]")
```
|
|
||||||
|
|
||||||
#### Install-time Cache Update:
|
|
||||||
- [ ] **Update install_node() to populate cache**
|
|
||||||
```python
# After successful installation in install_node()
if install_success:
    metadata = cnr_utils.get_nodepackage_info(installed_package.id)
    cache.update_metadata(installed_package.id, metadata)
```
|
|
||||||
|
|
||||||
#### Testing:
|
|
||||||
- [ ] Test `show installed` (enabled + disabled packages)
|
|
||||||
- [ ] Test `show enabled` (only enabled packages)
|
|
||||||
- [ ] Test `show disabled` (only disabled packages)
|
|
||||||
- [ ] Test unsupported commands show helpful error
|
|
||||||
- [ ] Test `simple-show` variants work correctly
|
|
||||||
- [ ] Test cache functionality (create, read, update)
|
|
||||||
- [ ] Test on-demand CNR info retrieval for cache misses
|
|
||||||
|
|
||||||
### 📝 get_all_installed_node_specs() Update (1 hour)
|
|
||||||
**File**: `cm_cli/__main__.py:573-605`
|
|
||||||
**Complexity**: Medium
|
|
||||||
|
|
||||||
#### Tasks:
|
|
||||||
- [ ] **Rewrite using InstalledNodePackage**
|
|
||||||
```python
def get_all_installed_node_specs():
    res = []
    for packages in unified_manager.installed_node_packages.values():
        for pack in packages:
            node_spec_str = f"{pack.id}@{pack.version}"
            res.append(node_spec_str)
    return res
```
|
|
||||||
- [ ] Test with `update all` command
|
|
||||||
- [ ] Verify node spec format is correct
|
|
||||||
|
|
||||||
### ⚙️ Context Management Updates (1 hour)
|
|
||||||
**File**: `cm_cli/__main__.py:117-134`
|
|
||||||
**Complexity**: Low
|
|
||||||
|
|
||||||
#### Tasks:
|
|
||||||
- [ ] **Remove load_nightly() call**
|
|
||||||
```python
def set_channel_mode(self, channel, mode):
    if mode is not None:
        self.mode = mode
    if channel is not None:
        self.channel = channel

    # OLD: asyncio.run(unified_manager.reload(...))
    # OLD: asyncio.run(unified_manager.load_nightly(...))

    # NEW: Just reload
    unified_manager.reload()
```
|
|
||||||
- [ ] Test channel/mode switching still works
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 🧹 Phase 3: Feature Removal & Final Testing (1 day)
|
|
||||||
|
|
||||||
## Day 3
|
|
||||||
|
|
||||||
### ❌ Remove Unavailable Features (2 hours)
|
|
||||||
**Complexity**: Low
|
|
||||||
|
|
||||||
#### deps-in-workflow Command Removal:
|
|
||||||
- [ ] **Update deps_in_workflow() function** (`cm_cli/__main__.py:1000-1050`)
|
|
||||||
```python
|
|
||||||
@app.command("deps-in-workflow")
|
|
||||||
def deps_in_workflow(...):
|
|
||||||
print("[bold red]ERROR: This feature is not available in the current version.[/bold red]")
|
|
||||||
print("The 'deps-in-workflow' feature has been removed.")
|
|
||||||
print("Please use alternative workflow analysis tools.")
|
|
||||||
sys.exit(1)
|
|
||||||
```
|
|
||||||
- [ ] Test command shows proper error message
|
|
||||||
- [ ] Update help text to reflect removal
|
|
||||||
|
|
||||||
#### install-deps Command Update:
|
|
||||||
- [ ] **Update install_deps() function** (`cm_cli/__main__.py:1203-1250`)
|
|
||||||
```python
|
|
||||||
# Remove extract_nodes_from_workflow usage (line ~1033)
|
|
||||||
# Replace with error handling or alternative approach
|
|
||||||
```
|
|
||||||
- [ ] Test with dependency files
|
|
||||||
|
|
||||||
### 🧪 Comprehensive Testing (4 hours)
|
|
||||||
|
|
||||||
#### Core Command Testing (2 hours):
|
|
||||||
- [ ] **Install Commands**:
|
|
||||||
- [ ] `install <cnr-package>`
|
|
||||||
- [ ] `install <git-url>`
|
|
||||||
- [ ] `install all` (if applicable)
|
|
||||||
|
|
||||||
- [ ] **Uninstall Commands**:
|
|
||||||
- [ ] `uninstall <package>`
|
|
||||||
- [ ] `uninstall all`
|
|
||||||
|
|
||||||
- [ ] **Enable/Disable Commands**:
|
|
||||||
- [ ] `enable <package>`
|
|
||||||
- [ ] `disable <package>`
|
|
||||||
- [ ] `enable all` / `disable all`
|
|
||||||
|
|
||||||
- [ ] **Update Commands**:
|
|
||||||
- [ ] `update <package>`
|
|
||||||
- [ ] `update all`
|
|
||||||
|
|
||||||
#### Show Commands Testing (1 hour):
|
|
||||||
- [ ] `show installed`
|
|
||||||
- [ ] `show enabled`
|
|
||||||
- [ ] `show disabled`
|
|
||||||
- [ ] `show all`
|
|
||||||
- [ ] `show not-installed`
|
|
||||||
- [ ] `simple-show` variants
|
|
||||||
|
|
||||||
#### Advanced Features Testing (1 hour):
|
|
||||||
- [ ] `save-snapshot`
|
|
||||||
- [ ] `restore-snapshot`
|
|
||||||
- [ ] `show snapshot`
|
|
||||||
- [ ] `show snapshot-list`
|
|
||||||
- [ ] `clear`
|
|
||||||
- [ ] `cli-only-mode`
|
|
||||||
|
|
||||||
### 🐛 Bug Fixes & Polish (2 hours)
|
|
||||||
- [ ] Fix any errors found during testing
|
|
||||||
- [ ] Improve error messages
|
|
||||||
- [ ] Ensure output formatting consistency
|
|
||||||
- [ ] Performance optimization if needed
|
|
||||||
- [ ] Code cleanup and comments
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 📋 Daily Checklists
|
|
||||||
|
|
||||||
## End of Day 1 Checklist:
|
|
||||||
- [ ] Imports successfully changed
|
|
||||||
- [ ] Basic CLI loading works
|
|
||||||
- [ ] install_node() handles both CNR and Git URLs
|
|
||||||
- [ ] No critical crashes in core functions
|
|
||||||
|
|
||||||
## End of Day 2 Checklist:
|
|
||||||
- [ ] show_list() displays all package types correctly
|
|
||||||
- [ ] get_all_installed_node_specs() works with new data structure
|
|
||||||
- [ ] Context management updated
|
|
||||||
- [ ] Core functionality regression-free
|
|
||||||
|
|
||||||
## End of Day 3 Checklist:
|
|
||||||
- [ ] All CLI commands tested and working
|
|
||||||
- [ ] Removed features show appropriate messages
|
|
||||||
- [ ] Output format acceptable to users
|
|
||||||
- [ ] No glob module modifications made
|
|
||||||
- [ ] Ready for code review
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 🎯 Success Criteria
|
|
||||||
|
|
||||||
## Must Pass:
|
|
||||||
- [ ] All core commands functional (install/uninstall/enable/disable/update)
|
|
||||||
- [ ] show commands display accurate information
|
|
||||||
- [ ] No modifications to glob module
|
|
||||||
- [ ] CLI code changes < 200 lines
|
|
||||||
- [ ] No critical regressions
|
|
||||||
|
|
||||||
## Bonus Points:
|
|
||||||
- [ ] Output format matches legacy closely
|
|
||||||
- [ ] Performance equals or exceeds legacy
|
|
||||||
- [ ] Error messages user-friendly
|
|
||||||
- [ ] Code is clean and maintainable
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 🚨 Emergency Contacts & Resources
|
|
||||||
|
|
||||||
## If Stuck:
|
|
||||||
1. **Review**: `CLI_PURE_GLOB_MIGRATION_PLAN.md` for detailed technical specs
|
|
||||||
2. **Reference**: `CLI_IMPLEMENTATION_CONTEXT.md` for current state
|
|
||||||
3. **Debug**: Use `print()` statements to understand data structures
|
|
||||||
4. **Fallback**: Implement minimal working version first, polish later
|
|
||||||
|
|
||||||
## Key Files to Reference:
|
|
||||||
- `comfyui_manager/glob/manager_core.py` - UnifiedManager APIs
|
|
||||||
- `comfyui_manager/common/node_package.py` - InstalledNodePackage class
|
|
||||||
- `comfyui_manager/common/cnr_utils.py` - CNR utilities
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Remember**: Focus on making it work first, then making it perfect. The constraint is NO glob modifications - CLI must adapt to glob's way of doing things.
|
|
||||||
@@ -1,424 +0,0 @@
|
|||||||
# CLI Migration Guide: Legacy to Glob Module
|
|
||||||
|
|
||||||
**Status**: ✅ Completed (Historical Reference)
|
|
||||||
**Last Updated**: 2025-08-30
|
|
||||||
**Purpose**: Complete guide for migrating ComfyUI Manager CLI from legacy to glob module
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📋 Table of Contents
|
|
||||||
|
|
||||||
1. [Overview](#overview)
|
|
||||||
2. [Legacy vs Glob Comparison](#legacy-vs-glob-comparison)
|
|
||||||
3. [Migration Strategy](#migration-strategy)
|
|
||||||
4. [Implementation Details](#implementation-details)
|
|
||||||
5. [Key Constraints](#key-constraints)
|
|
||||||
6. [API Reference](#api-reference-quick)
|
|
||||||
7. [Rollback Plan](#rollback-plan)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
### Objective
|
|
||||||
Migrate ComfyUI Manager CLI from legacy module to glob module using **only existing glob APIs** without modifying the glob module itself.
|
|
||||||
|
|
||||||
### Scope
|
|
||||||
- **Target File**: `comfyui_manager/cm_cli/__main__.py` (1305 lines)
|
|
||||||
- **Timeline**: 3.5 days
|
|
||||||
- **Approach**: Minimal CLI changes, maximum compatibility
|
|
||||||
- **Constraint**: ❌ NO glob module modifications
|
|
||||||
|
|
||||||
### Current State
|
|
||||||
```python
|
|
||||||
# Current imports (Lines 39-41)
|
|
||||||
from ..legacy import manager_core as core
|
|
||||||
from ..legacy.manager_core import unified_manager
|
|
||||||
|
|
||||||
# Target imports
|
|
||||||
from ..glob import manager_core as core
|
|
||||||
from ..glob.manager_core import unified_manager
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Legacy vs Glob Comparison
|
|
||||||
|
|
||||||
### Core Architecture Differences
|
|
||||||
|
|
||||||
#### Legacy Module (Current)
|
|
||||||
**Data Structure**: Dictionary-based global state
|
|
||||||
```python
|
|
||||||
unified_manager.active_nodes # Active nodes dict
|
|
||||||
unified_manager.unknown_active_nodes # Unknown active nodes
|
|
||||||
unified_manager.cnr_inactive_nodes # Inactive CNR nodes
|
|
||||||
unified_manager.nightly_inactive_nodes # Inactive nightly nodes
|
|
||||||
unified_manager.unknown_inactive_nodes # Unknown inactive nodes
|
|
||||||
unified_manager.cnr_map # CNR info mapping
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Glob Module (Target)
|
|
||||||
**Data Structure**: Object-oriented with InstalledNodePackage
|
|
||||||
```python
|
|
||||||
unified_manager.installed_node_packages # dict[str, list[InstalledNodePackage]]
|
|
||||||
unified_manager.repo_nodepack_map # dict[str, InstalledNodePackage]
|
|
||||||
```
|
|
||||||
|
|
||||||
### Method Compatibility Matrix
|
|
||||||
|
|
||||||
| Method | Legacy | Glob | Status | Action |
|
|
||||||
|--------|--------|------|--------|--------|
|
|
||||||
| `unified_enable()` | ✅ | ✅ | Compatible | Direct mapping |
|
|
||||||
| `unified_disable()` | ✅ | ✅ | Compatible | Direct mapping |
|
|
||||||
| `unified_uninstall()` | ✅ | ✅ | Compatible | Direct mapping |
|
|
||||||
| `unified_update()` | ✅ | ✅ | Compatible | Direct mapping |
|
|
||||||
| `install_by_id()` | Sync | Async | Modified | Use asyncio.run() |
|
|
||||||
| `gitclone_install()` | ✅ | ❌ | Replaced | Use repo_install() |
|
|
||||||
| `get_custom_nodes()` | ✅ | ❌ | Removed | Use cnr_utils |
|
|
||||||
| `load_nightly()` | ✅ | ❌ | Removed | Not needed |
|
|
||||||
| `extract_nodes_from_workflow()` | ✅ | ❌ | Removed | Feature removed |
|
|
||||||
|
|
||||||
### InstalledNodePackage Class
|
|
||||||
|
|
||||||
```python
@dataclass
class InstalledNodePackage:
    id: str                 # Package identifier
    fullpath: str           # Full filesystem path
    disabled: bool          # Disabled status
    version: str            # Version (nightly/unknown/x.y.z)
    repo_url: str = None    # Repository URL

    # Properties
    @property
    def is_unknown(self) -> bool: return self.version == "unknown"

    @property
    def is_nightly(self) -> bool: return self.version == "nightly"

    @property
    def is_from_cnr(self) -> bool: return not (self.is_unknown or self.is_nightly)

    @property
    def is_enabled(self) -> bool: return not self.disabled

    @property
    def is_disabled(self) -> bool: return self.disabled
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Migration Strategy
|
|
||||||
|
|
||||||
### Phase 1: Setup (0.5 day)
|
|
||||||
**Goal**: Basic migration with error identification
|
|
||||||
|
|
||||||
1. **Import Path Changes**
|
|
||||||
```python
|
|
||||||
# Change 2 lines
|
|
||||||
from ..glob import manager_core as core
|
|
||||||
from ..glob.manager_core import unified_manager
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Initial Testing**
|
|
||||||
- Run basic commands
|
|
||||||
- Identify breaking changes
|
|
||||||
- Document errors
|
|
||||||
|
|
||||||
3. **Error Analysis**
|
|
||||||
- List all affected functions
|
|
||||||
- Categorize by priority
|
|
||||||
- Plan fixes
|
|
||||||
|
|
||||||
### Phase 2: Core Implementation (2 days)
|
|
||||||
**Goal**: Adapt CLI to glob architecture
|
|
||||||
|
|
||||||
1. **install_node() Updates**
|
|
||||||
```python
# Replace gitclone_install with repo_install
if unified_manager.is_url_like(node_spec_str):
    res = asyncio.run(unified_manager.repo_install(
        node_spec_str,
        os.path.basename(node_spec_str),
        instant_execution=True,
        no_deps=cmd_ctx.no_deps
    ))
```
|
|
||||||
|
|
||||||
2. **show_list() Rewrite** (Most complex change)
|
|
||||||
- Migrate from dictionary-based to InstalledNodePackage-based
|
|
||||||
- Implement installed-only approach with optional CNR lookup
|
|
||||||
- See [show_list() Implementation](#show_list-implementation) section
|
|
||||||
|
|
||||||
3. **Context Management**
|
|
||||||
- Update get_all_installed_node_specs()
|
|
||||||
- Adapt to new data structures
|
|
||||||
|
|
||||||
4. **Data Structure Migration**
|
|
||||||
- Replace all active_nodes references
|
|
||||||
- Use installed_node_packages instead
|
|
||||||
|
|
||||||
### Phase 3: Final Testing (1 day)
|
|
||||||
**Goal**: Comprehensive validation
|
|
||||||
|
|
||||||
1. **Feature Removal**
|
|
||||||
- Remove deps-in-workflow (not supported)
|
|
||||||
- Stub unsupported features
|
|
||||||
|
|
||||||
2. **Testing**
|
|
||||||
- Test all CLI commands
|
|
||||||
- Verify output format
|
|
||||||
- Check edge cases
|
|
||||||
|
|
||||||
3. **Polish**
|
|
||||||
- Fix bugs
|
|
||||||
- Improve error messages
|
|
||||||
- Update help text
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Implementation Details
|
|
||||||
|
|
||||||
### show_list() Implementation
|
|
||||||
|
|
||||||
**Challenge**: Legacy uses multiple dictionaries, glob uses single InstalledNodePackage collection
|
|
||||||
|
|
||||||
**Solution**: Installed-only approach with on-demand CNR lookup
|
|
||||||
|
|
||||||
```python
def show_list(kind: str, simple: bool = False):
    """
    Display node package list

    Args:
        kind: 'installed', 'enabled', 'disabled', 'all'
        simple: If True, show simple format
    """

    # Get all installed packages
    all_packages = []
    for packages in unified_manager.installed_node_packages.values():
        all_packages.extend(packages)

    # Filter by kind
    if kind == "enabled":
        packages = [p for p in all_packages if p.is_enabled]
    elif kind == "disabled":
        packages = [p for p in all_packages if p.is_disabled]
    elif kind == "installed" or kind == "all":
        packages = all_packages
    else:
        print(f"Unknown kind: {kind}")
        return

    # Display
    if simple:
        for pkg in packages:
            print(pkg.id)
    else:
        # Detailed display with CNR info on-demand
        for pkg in packages:
            status = "disabled" if pkg.disabled else "enabled"
            version_info = f"v{pkg.version}" if pkg.version != "unknown" else "unknown"

            print(f"[{status}] {pkg.id} ({version_info})")

            # Optionally fetch CNR info for non-nightly packages
            if pkg.is_from_cnr and not simple:
                cnr_info = cnr_utils.get_nodepackage(pkg.id)
                if cnr_info:
                    print(f" Description: {cnr_info.get('description', 'N/A')}")
```
|
|
||||||
|
|
||||||
**Key Changes**:
|
|
||||||
1. Single source of truth: `installed_node_packages`
|
|
||||||
2. No separate active/inactive dictionaries
|
|
||||||
3. On-demand CNR lookup instead of pre-cached cnr_map
|
|
||||||
4. Filter by InstalledNodePackage properties
|
|
||||||
|
|
||||||
### Git Installation Migration
|
|
||||||
|
|
||||||
**Before (Legacy)**:
|
|
||||||
```python
if core.is_valid_url(node_spec_str):
    res = asyncio.run(core.gitclone_install(
        node_spec_str,
        no_deps=cmd_ctx.no_deps
    ))
```
|
|
||||||
|
|
||||||
**After (Glob)**:
|
|
||||||
```python
if unified_manager.is_url_like(node_spec_str):
    res = asyncio.run(unified_manager.repo_install(
        node_spec_str,
        os.path.basename(node_spec_str),  # repo_path derived from URL
        instant_execution=True,           # Execute immediately
        no_deps=cmd_ctx.no_deps           # Respect --no-deps flag
    ))
```
|
|
||||||
|
|
||||||
### Async Function Handling
|
|
||||||
|
|
||||||
**Pattern**: Wrap async glob methods with asyncio.run()
|
|
||||||
|
|
||||||
```python
# install_by_id is async in glob
res = asyncio.run(unified_manager.install_by_id(
    packname=node_name,
    version_spec=version,
    instant_execution=True,
    no_deps=cmd_ctx.no_deps
))
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Key Constraints
|
|
||||||
|
|
||||||
### Hard Constraints (Cannot Change)
|
|
||||||
1. ❌ **No glob module modifications**
|
|
||||||
- Cannot add new methods to UnifiedManager
|
|
||||||
- Cannot add compatibility properties
|
|
||||||
- Must use existing APIs only
|
|
||||||
|
|
||||||
2. ❌ **No legacy dependencies**
|
|
||||||
- CLI must work without legacy module
|
|
||||||
- Clean break from old architecture
|
|
||||||
|
|
||||||
3. ❌ **Maintain CLI interface**
|
|
||||||
- Command syntax unchanged
|
|
||||||
- Output format similar (minor differences acceptable)
|
|
||||||
|
|
||||||
### Soft Constraints (Acceptable Trade-offs)
|
|
||||||
1. ✅ **Feature removal acceptable**
|
|
||||||
- deps-in-workflow feature can be removed
|
|
||||||
- Channel/mode support can be simplified
|
|
||||||
|
|
||||||
2. ✅ **Performance trade-offs acceptable**
|
|
||||||
- On-demand CNR lookup vs pre-cached
|
|
||||||
- Slight performance degradation acceptable
|
|
||||||
|
|
||||||
3. ✅ **Output format flexibility**
|
|
||||||
- Minor formatting differences acceptable
|
|
||||||
- Must remain readable and useful
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## API Reference (Quick)
|
|
||||||
|
|
||||||
### UnifiedManager Core Methods
|
|
||||||
|
|
||||||
```python
|
|
||||||
# Installation
|
|
||||||
async def install_by_id(packname, version_spec, instant_execution, no_deps) -> ManagedResult
|
|
||||||
|
|
||||||
# Git/URL installation
|
|
||||||
async def repo_install(url, repo_path, instant_execution, no_deps) -> ManagedResult
|
|
||||||
|
|
||||||
# Enable/Disable
|
|
||||||
def unified_enable(packname, version_spec=None) -> ManagedResult
|
|
||||||
def unified_disable(packname) -> ManagedResult
|
|
||||||
|
|
||||||
# Update/Uninstall
|
|
||||||
def unified_update(packname, instant_execution, no_deps) -> ManagedResult
|
|
||||||
def unified_uninstall(packname) -> ManagedResult
|
|
||||||
|
|
||||||
# Query
|
|
||||||
def get_active_pack(packname) -> InstalledNodePackage | None
|
|
||||||
def get_inactive_pack(packname, version_spec) -> InstalledNodePackage | None
|
|
||||||
def resolve_node_spec(packname, guess_mode) -> NodeSpec
|
|
||||||
|
|
||||||
# Utility
|
|
||||||
def is_url_like(text) -> bool
|
|
||||||
```
|
|
||||||
|
|
||||||
### Data Access
|
|
||||||
|
|
||||||
```python
|
|
||||||
# Installed packages
|
|
||||||
unified_manager.installed_node_packages: dict[str, list[InstalledNodePackage]]
|
|
||||||
|
|
||||||
# Repository mapping
|
|
||||||
unified_manager.repo_nodepack_map: dict[str, InstalledNodePackage]
|
|
||||||
```
|
|
||||||
|
|
||||||
### External Utilities
|
|
||||||
|
|
||||||
```python
|
|
||||||
# CNR utilities
|
|
||||||
from ..common import cnr_utils
|
|
||||||
cnr_utils.get_nodepackage(id) -> dict
|
|
||||||
cnr_utils.get_all_nodepackages() -> list[dict]
|
|
||||||
```
|
|
||||||
|
|
||||||
For complete API reference, see [CLI_API_REFERENCE.md](CLI_API_REFERENCE.md)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Rollback Plan
|
|
||||||
|
|
||||||
### If Migration Fails
|
|
||||||
|
|
||||||
1. **Immediate Rollback** (< 5 minutes)
|
|
||||||
```python
|
|
||||||
# Revert imports in __main__.py
|
|
||||||
from ..legacy import manager_core as core
|
|
||||||
from ..legacy.manager_core import unified_manager
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Verify Rollback**
|
|
||||||
```bash
|
|
||||||
# Test basic commands
|
|
||||||
cm-cli show installed
|
|
||||||
cm-cli install <package>
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Document Issues**
|
|
||||||
- Note what failed
|
|
||||||
- Gather error logs
|
|
||||||
- Plan fixes
|
|
||||||
|
|
||||||
### Risk Mitigation
|
|
||||||
|
|
||||||
1. **Backup**: Keep legacy module available
|
|
||||||
2. **Testing**: Comprehensive test suite before deployment
|
|
||||||
3. **Staging**: Test in non-production environment first
|
|
||||||
4. **Monitoring**: Watch for errors after deployment
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Success Criteria
|
|
||||||
|
|
||||||
### Must Pass (Blockers)
|
|
||||||
- ✅ All core commands functional (install, update, enable, disable, uninstall)
|
|
||||||
- ✅ Package information displays correctly
|
|
||||||
- ✅ No glob module modifications
|
|
||||||
- ✅ No critical regressions
|
|
||||||
|
|
||||||
### Should Pass (Important)
|
|
||||||
- ✅ Output format similar to legacy
|
|
||||||
- ✅ Performance comparable to legacy
|
|
||||||
- ✅ User-friendly error messages
|
|
||||||
- ✅ Help text updated
|
|
||||||
|
|
||||||
### Nice to Have
|
|
||||||
- ✅ Improved code structure
|
|
||||||
- ✅ Better error handling
|
|
||||||
- ✅ Type hints added
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Reference Documents
|
|
||||||
|
|
||||||
- **[CLI_API_REFERENCE.md](CLI_API_REFERENCE.md)** - Complete API documentation
|
|
||||||
- **[CLI_IMPLEMENTATION_CHECKLIST.md](CLI_IMPLEMENTATION_CHECKLIST.md)** - Step-by-step tasks
|
|
||||||
- **[CLI_TESTING_GUIDE.md](CLI_TESTING_GUIDE.md)** - Testing strategy
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Conclusion
|
|
||||||
|
|
||||||
The CLI migration from legacy to glob module is achievable through systematic adaptation of CLI code to glob's object-oriented architecture. The key is respecting the constraint of no glob modifications while leveraging existing glob APIs effectively.
|
|
||||||
|
|
||||||
**Status**: This migration has been completed successfully. The CLI now uses glob module exclusively.
|
|
||||||
@@ -1,407 +0,0 @@
|
|||||||
# CLI Migration Testing Checklist
|
|
||||||
|
|
||||||
## 🧪 Testing Strategy Overview
|
|
||||||
**Approach**: Progressive testing at each implementation phase
|
|
||||||
**Tools**: Manual CLI testing, comparison with legacy behavior
|
|
||||||
**Environment**: ComfyUI development environment with test packages
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 📋 Phase 1 Testing (Import Changes)
|
|
||||||
|
|
||||||
## ✅ Basic CLI Loading (Must Pass)
|
|
||||||
```bash
|
|
||||||
# Test CLI loads without import errors
|
|
||||||
python -m comfyui_manager.cm_cli --help
|
|
||||||
python -m comfyui_manager.cm_cli help
|
|
||||||
|
|
||||||
# Expected: CLI help displays, no ImportError exceptions
|
|
||||||
```
|
|
||||||
|
|
||||||
## ✅ Simple Command Smoke Tests
|
|
||||||
```bash
|
|
||||||
# Commands that should work immediately
|
|
||||||
python -m comfyui_manager.cm_cli show snapshot
|
|
||||||
python -m comfyui_manager.cm_cli clear
|
|
||||||
|
|
||||||
# Expected: Commands execute, may show different output but no crashes
|
|
||||||
```
|
|
||||||
|
|
||||||
## 🐛 Error Identification
|
|
||||||
- [ ] Document all import-related errors
|
|
||||||
- [ ] Identify which functions fail immediately
|
|
||||||
- [ ] Note any missing attributes/methods used by CLI
|
|
||||||
- [ ] List functions that need immediate attention
|
|
||||||
|
|
||||||
**Pass Criteria**: CLI loads and basic commands don't crash
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# 🔧 Phase 2 Testing (Core Functions)
|
|
||||||
|
|
||||||
## 🚀 Install Command Testing
|
|
||||||
|
|
||||||
### CNR Package Installation
|
|
||||||
```bash
|
|
||||||
# Test CNR package installation
|
|
||||||
python -m comfyui_manager.cm_cli install ComfyUI-Manager
|
|
||||||
python -m comfyui_manager.cm_cli install <known-cnr-package>
|
|
||||||
|
|
||||||
# Expected behaviors:
|
|
||||||
# - Package resolves correctly
|
|
||||||
# - Installation proceeds
|
|
||||||
# - Success/failure message displayed
|
|
||||||
# - Package appears in enabled state
|
|
||||||
```
|
|
||||||
**Test Cases**:
|
|
||||||
- [ ] Install new CNR package
|
|
||||||
- [ ] Install already-installed package (should skip)
|
|
||||||
- [ ] Install non-existent package (should error gracefully)
|
|
||||||
- [ ] Install with `--no-deps` flag
|
|
||||||
|
|
||||||
### Git URL Installation
|
|
||||||
```bash
|
|
||||||
# Test Git URL installation
|
|
||||||
python -m comfyui_manager.cm_cli install https://github.com/user/repo.git
|
|
||||||
python -m comfyui_manager.cm_cli install https://github.com/user/repo
|
|
||||||
|
|
||||||
# Expected behaviors:
|
|
||||||
# - URL detected as Git repository
|
|
||||||
# - repo_install() method called
|
|
||||||
# - Installation proceeds or fails gracefully
|
|
||||||
```
|
|
||||||
**Test Cases**:
|
|
||||||
- [ ] Install from Git URL with .git suffix
|
|
||||||
- [ ] Install from Git URL without .git suffix
|
|
||||||
- [ ] Install from invalid Git URL (should error)
|
|
||||||
- [ ] Install from private repository (may fail gracefully)
|
|
||||||
|
|
||||||
## 📊 Show Commands Testing
|
|
||||||
|
|
||||||
### Show Installed/Enabled
|
|
||||||
```bash
|
|
||||||
python -m comfyui_manager.cm_cli show installed
|
|
||||||
python -m comfyui_manager.cm_cli show enabled
|
|
||||||
|
|
||||||
# Expected: List of enabled packages with:
|
|
||||||
# - Package names
|
|
||||||
# - Version information
|
|
||||||
# - Author/publisher info where available
|
|
||||||
# - Correct status indicators
|
|
||||||
```
|
|
||||||
|
|
||||||
### Show Disabled/Not-Installed
|
|
||||||
```bash
|
|
||||||
python -m comfyui_manager.cm_cli show disabled
|
|
||||||
python -m comfyui_manager.cm_cli show not-installed
|
|
||||||
|
|
||||||
# Expected: Appropriate package lists with status
|
|
||||||
```
|
|
||||||
|
|
||||||
### Show All & Simple Mode
|
|
||||||
```bash
|
|
||||||
python -m comfyui_manager.cm_cli show all
|
|
||||||
python -m comfyui_manager.cm_cli simple-show all
|
|
||||||
|
|
||||||
# Expected: Comprehensive package list
|
|
||||||
# Simple mode should show condensed format
|
|
||||||
```
|
|
||||||
|
|
||||||
**Detailed Test Matrix**:
|
|
||||||
- [ ] `show installed` - displays all installed packages
|
|
||||||
- [ ] `show enabled` - displays only enabled packages
|
|
||||||
- [ ] `show disabled` - displays only disabled packages
|
|
||||||
- [ ] `show not-installed` - displays available but not installed packages
|
|
||||||
- [ ] `show all` - displays comprehensive list
|
|
||||||
- [ ] `show cnr` - displays CNR packages only
|
|
||||||
- [ ] `simple-show` variants - condensed output format
|
|
||||||
|
|
||||||
**Validation Criteria**:
|
|
||||||
- [ ] Package counts make sense (enabled + disabled = installed)
|
|
||||||
- [ ] CNR packages show publisher information
|
|
||||||
- [ ] Nightly packages marked appropriately
|
|
||||||
- [ ] Unknown packages handled correctly
|
|
||||||
- [ ] No crashes with empty package sets
|
|
||||||
|
|
||||||
## ⚙️ Management Commands Testing

### Enable/Disable Commands
```bash
# Disable a package, verify, then re-enable it
python -m comfyui_manager.cm_cli disable <package-name>
python -m comfyui_manager.cm_cli show disabled    # Should appear
python -m comfyui_manager.cm_cli enable <package-name>
python -m comfyui_manager.cm_cli show enabled     # Should appear

# Test edge cases
python -m comfyui_manager.cm_cli enable <already-enabled-package>    # Should skip
python -m comfyui_manager.cm_cli disable <non-existent-package>      # Should error
```

**Test Cases**:
- [ ] Enable disabled package
- [ ] Disable enabled package
- [ ] Enable already-enabled package (skip)
- [ ] Disable already-disabled package (skip)
- [ ] Enable non-existent package (error)
- [ ] Disable non-existent package (error)

### Uninstall Commands
```bash
# Uninstall package
python -m comfyui_manager.cm_cli uninstall <test-package>
python -m comfyui_manager.cm_cli show installed   # Should not appear

# Test variations
python -m comfyui_manager.cm_cli uninstall <package>@unknown
```

**Test Cases**:
- [ ] Uninstall CNR package
- [ ] Uninstall nightly package
- [ ] Uninstall unknown package
- [ ] Uninstall non-existent package (should error gracefully)

### Update Commands
```bash
# Update specific package
python -m comfyui_manager.cm_cli update <package-name>

# Update all packages
python -m comfyui_manager.cm_cli update all
```

**Test Cases**:
- [ ] Update single package
- [ ] Update all packages
- [ ] Update non-existent package (should error)
- [ ] Update already up-to-date package (should skip)

## 🗃️ Advanced Function Testing

### get_all_installed_node_specs()
```bash
# This function is used internally by update/enable/disable "all" commands
python -m comfyui_manager.cm_cli update all
python -m comfyui_manager.cm_cli enable all
python -m comfyui_manager.cm_cli disable all

# Expected: Commands process all installed packages
```

**Validation**:
- [ ] "all" commands process expected number of packages
- [ ] Package specs format correctly (name@version; see the sketch after this list)
- [ ] No duplicates in package list
- [ ] All package types included (CNR, nightly, unknown)
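
The `name@version` spec format referred to above can be illustrated with a small helper that turns installed package records into specs and drops duplicates. The `(name, version)` tuples are a stand-in for the real installed-package listing; the field layout is assumed for illustration, not taken from the glob data model.

```python
# Illustrative sketch of the name@version spec format checked above.
from typing import Iterable, List, Tuple

def build_specs(packages: Iterable[Tuple[str, str]]) -> List[str]:
    specs, seen = [], set()
    for name, version in packages:
        spec = f"{name}@{version}" if version else name
        if spec not in seen:          # "No duplicates in package list"
            seen.add(spec)
            specs.append(spec)
    return specs

assert build_specs([("comfyui-impact-pack", "1.0.2"), ("some-node", "nightly")]) == [
    "comfyui-impact-pack@1.0.2",
    "some-node@nightly",
]
```
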
---

# 🧹 Phase 3 Testing (Feature Removal & Polish)

## ❌ Removed Feature Testing

### deps-in-workflow Command
```bash
python -m comfyui_manager.cm_cli deps-in-workflow workflow.json deps.json

# Expected: Clear error message explaining feature removal
# Should NOT crash or show confusing errors
```

### install-deps Command (if affected)
```bash
python -m comfyui_manager.cm_cli install-deps deps.json

# Expected: Either works with alternative implementation or shows clear error
```

**Validation**:
- [ ] Error messages are user-friendly
- [ ] No stack traces for removed features
- [ ] Help text updated to reflect changes
- [ ] Alternative solutions mentioned where applicable

## 📸 Snapshot Functionality

### Save/Restore Snapshots
```bash
# Save snapshot
python -m comfyui_manager.cm_cli save-snapshot test-snapshot.json
ls snapshots/   # Should show new snapshot

# Restore snapshot
python -m comfyui_manager.cm_cli restore-snapshot test-snapshot.json
```

**Test Cases**:
- [ ] Save snapshot to default location
- [ ] Save snapshot to custom path
- [ ] Restore snapshot successfully
- [ ] Handle invalid snapshot files gracefully

### Snapshot Display
```bash
python -m comfyui_manager.cm_cli show snapshot
python -m comfyui_manager.cm_cli show snapshot-list
```

**Validation**:
- [ ] Current state displayed correctly
- [ ] Snapshot list shows available snapshots
- [ ] JSON format valid and readable (see the sketch after this list)
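
A small check for the "JSON format valid and readable" criterion: load the snapshot file and confirm it parses into a non-empty object. The exact snapshot schema is not asserted here because it is not specified in this guide.

```python
# Hedged sketch: validate that a saved snapshot is well-formed JSON.
# Only parseability and non-emptiness are checked; the schema itself
# (which keys a snapshot contains) is intentionally not assumed.
import json
from pathlib import Path

def validate_snapshot(path: str) -> dict:
    data = json.loads(Path(path).read_text(encoding="utf-8"))
    if not isinstance(data, dict) or not data:
        raise ValueError(f"snapshot {path} parsed but looks empty or malformed")
    return data
```
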
---

# 🎯 Comprehensive Integration Testing

## 🔄 End-to-End Workflows

### Complete Package Lifecycle
```bash
# 1. Install package
python -m comfyui_manager.cm_cli install <test-package>

# 2. Verify installation
python -m comfyui_manager.cm_cli show enabled | grep <test-package>

# 3. Disable package
python -m comfyui_manager.cm_cli disable <test-package>

# 4. Verify disabled
python -m comfyui_manager.cm_cli show disabled | grep <test-package>

# 5. Re-enable package
python -m comfyui_manager.cm_cli enable <test-package>

# 6. Update package
python -m comfyui_manager.cm_cli update <test-package>

# 7. Uninstall package
python -m comfyui_manager.cm_cli uninstall <test-package>

# 8. Verify removal
python -m comfyui_manager.cm_cli show installed | grep <test-package>   # Should be empty
```

### Batch Operations
```bash
# Install multiple packages
python -m comfyui_manager.cm_cli install package1 package2 package3

# Disable all packages
python -m comfyui_manager.cm_cli disable all

# Enable all packages
python -m comfyui_manager.cm_cli enable all

# Update all packages
python -m comfyui_manager.cm_cli update all
```
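
The eight lifecycle steps above lend themselves to a scripted smoke test. The sketch below shells out to the CLI in order and fails on the first unexpected exit code; `<test-package>` stays a placeholder you substitute with a safe, known package, and the `grep`-style checks become simple substring assertions.

```python
# Hedged sketch: drive the install -> disable -> enable -> update ->
# uninstall lifecycle via subprocess. PACKAGE is a placeholder.
import subprocess
import sys

PACKAGE = "<test-package>"  # substitute a safe, known package

def cli(*args: str) -> str:
    result = subprocess.run(
        [sys.executable, "-m", "comfyui_manager.cm_cli", *args],
        capture_output=True, text=True,
    )
    if result.returncode != 0:
        raise RuntimeError(f"cm_cli {' '.join(args)} failed: {result.stderr}")
    return result.stdout

if __name__ == "__main__":
    cli("install", PACKAGE)
    assert PACKAGE in cli("show", "enabled")
    cli("disable", PACKAGE)
    assert PACKAGE in cli("show", "disabled")
    cli("enable", PACKAGE)
    cli("update", PACKAGE)
    cli("uninstall", PACKAGE)
    assert PACKAGE not in cli("show", "installed")
```
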
## 🚨 Error Condition Testing

### Network/Connectivity Issues
- [ ] Test with no internet connection
- [ ] Test with slow internet connection
- [ ] Test with CNR API unavailable

### File System Issues
- [ ] Test with insufficient disk space
- [ ] Test with permission errors
- [ ] Test with corrupted package directories

### Invalid Input Handling
- [ ] Non-existent package names
- [ ] Invalid Git URLs
- [ ] Malformed command arguments
- [ ] Special characters in package names

---

# 📊 Performance & Regression Testing

## ⚡ Performance Comparison
```bash
# Time core operations
time python -m comfyui_manager.cm_cli show all
time python -m comfyui_manager.cm_cli install <test-package>
time python -m comfyui_manager.cm_cli update all

# Compare with legacy timings if available
```

**Validation**:
- [ ] Operations complete in reasonable time (see the timing sketch after this list)
- [ ] No significant performance regression
- [ ] Memory usage acceptable
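
For repeatable numbers, the `time` calls above can be wrapped in a tiny harness that runs each command a few times and reports the best wall-clock time. No absolute thresholds are assumed, since "reasonable time" depends on the environment and on the legacy baseline you compare against.

```python
# Hedged sketch: measure wall-clock time of CLI operations for comparison
# against legacy timings.
import subprocess
import sys
import time

def time_command(args, runs=3) -> float:
    best = float("inf")
    for _ in range(runs):
        start = time.perf_counter()
        subprocess.run(
            [sys.executable, "-m", "comfyui_manager.cm_cli", *args],
            capture_output=True,
        )
        best = min(best, time.perf_counter() - start)
    return best

if __name__ == "__main__":
    for args in (["show", "all"], ["update", "all"]):
        print(f"{' '.join(args)}: {time_command(args):.2f}s (best of 3)")
```
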
## 🔄 Regression Testing

### Output Format Comparison
- [ ] Compare show command output with legacy version
- [ ] Document acceptable format differences
- [ ] Ensure essential information preserved

### Behavioral Consistency
- [ ] Command success/failure behavior matches legacy
- [ ] Error message quality comparable to legacy
- [ ] User experience remains smooth

---

# ✅ Final Validation Checklist

## Must Pass (Blockers)
- [ ] All core commands functional (install/uninstall/enable/disable/update)
- [ ] Show commands display accurate package information
- [ ] No crashes or unhandled exceptions
- [ ] No modifications to glob module
- [ ] CLI loads and responds to help commands

## Should Pass (Important)
- [ ] Output format reasonably similar to legacy
- [ ] Performance comparable to legacy
- [ ] Error handling graceful and informative
- [ ] Removed features clearly communicated

## May Pass (Nice to Have)
- [ ] Output format identical to legacy
- [ ] Performance better than legacy
- [ ] Additional error recovery features
- [ ] Code improvements and cleanup

---

# 🧰 Testing Tools & Commands

## Essential Test Commands
```bash
# Quick smoke test
python -m comfyui_manager.cm_cli --help

# Core functionality test
python -m comfyui_manager.cm_cli show all

# Package management test
python -m comfyui_manager.cm_cli install <safe-test-package>

# Cleanup test
python -m comfyui_manager.cm_cli uninstall <test-package>
```

## Debug Commands
```bash
# Check Python imports
python -c "from comfyui_manager.glob import manager_core; print('OK')"

# Check data structures
python -c "from comfyui_manager.glob.manager_core import unified_manager; print(len(unified_manager.installed_node_packages))"

# Check CNR access
python -c "from comfyui_manager.common import cnr_utils; print(len(cnr_utils.get_all_nodepackages()))"
```

---

Use this checklist systematically during implementation to ensure comprehensive testing and validation of the CLI migration.
@@ -1,184 +0,0 @@
# CLI Migration Documentation

**Status**: ✅ Completed (Historical Reference)
**Last Updated**: 2025-11-04
**Purpose**: Documentation for CLI migration from legacy to glob module (completed August 2025)

---

## 📁 Directory Overview

This directory contains consolidated documentation for the ComfyUI Manager CLI migration project. The migration successfully moved the CLI from the legacy module to the glob module without modifying glob module code.

---

## 📚 Documentation Files

### 🎯 **Comprehensive Guide**
- **[CLI_MIGRATION_GUIDE.md](CLI_MIGRATION_GUIDE.md)** (~800 lines)
  - Complete migration guide with all technical details
  - Legacy vs Glob comparison
  - Implementation strategies
  - Code examples and patterns
  - **Read this first** for complete understanding

### 📖 **Implementation Resources**
- **[CLI_IMPLEMENTATION_CHECKLIST.md](CLI_IMPLEMENTATION_CHECKLIST.md)** (~350 lines)
  - Step-by-step implementation tasks
  - Daily breakdown (3.5 days)
  - Testing checkpoints
  - Completion criteria

- **[CLI_API_REFERENCE.md](CLI_API_REFERENCE.md)** (~300 lines)
  - Quick API lookup guide
  - UnifiedManager methods
  - InstalledNodePackage structure
  - Usage examples

- **[CLI_TESTING_GUIDE.md](CLI_TESTING_GUIDE.md)** (~400 lines)
  - Comprehensive testing strategy
  - Test scenarios and cases
  - Validation procedures
  - Rollback planning

---

## 🚀 Quick Start (For Reference)

### Understanding the Migration

1. **Start Here**: [CLI_MIGRATION_GUIDE.md](CLI_MIGRATION_GUIDE.md)
   - Read sections: Overview → Legacy vs Glob → Migration Strategy

2. **API Reference**: [CLI_API_REFERENCE.md](CLI_API_REFERENCE.md)
   - Use for quick API lookups during implementation

3. **Implementation**: [CLI_IMPLEMENTATION_CHECKLIST.md](CLI_IMPLEMENTATION_CHECKLIST.md)
   - Follow step-by-step if re-implementing

4. **Testing**: [CLI_TESTING_GUIDE.md](CLI_TESTING_GUIDE.md)
   - Reference for validation procedures

---

## 🎯 Migration Summary

### Objective Achieved
✅ Migrated CLI from `..legacy` to `..glob` imports using only existing glob APIs

### Key Accomplishments
- ✅ **Single file modified**: `comfyui_manager/cm_cli/__main__.py`
- ✅ **No glob modifications**: Used existing APIs only
- ✅ **All commands functional**: install, update, enable, disable, uninstall
- ✅ **show_list() rewritten**: Adapted to InstalledNodePackage architecture
- ✅ **Completed in**: 3.5 days as planned

### Major Changes
1. Import path updates (2 lines)
2. `install_node()` → use `repo_install()` for Git URLs
3. `show_list()` → rewritten for InstalledNodePackage
4. Data structure migration: dictionaries → objects (see the sketch below)
5. Removed unsupported features (deps-in-workflow)
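
Change 4 is the pattern that touched the most call sites: fields that the legacy CLI read out of plain dictionaries are attributes on `InstalledNodePackage` objects in glob. The attribute names below are illustrative assumptions only; consult the actual dataclass for the real field names.

```python
# Illustrative only: the dictionary-to-object access shift described in
# change 4. Field/attribute names are assumptions, not the real data model.

# Legacy-style access (dict records):
def describe_legacy(pkg: dict) -> str:
    return f"{pkg['title']} {pkg.get('version', 'unknown')}"

# Glob-style access (InstalledNodePackage-like object):
def describe_glob(pkg) -> str:
    return f"{pkg.id} {pkg.version or 'unknown'}"
```
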
---

## 📋 File Organization

```
docs/internal/cli_migration/
├── README.md                        (This file - Quick navigation)
├── CLI_MIGRATION_GUIDE.md           (Complete guide - 800 lines)
├── CLI_IMPLEMENTATION_CHECKLIST.md  (Task breakdown - 350 lines)
├── CLI_API_REFERENCE.md             (API docs - 300 lines)
└── CLI_TESTING_GUIDE.md             (Testing guide - 400 lines)

Total: 5 files, ~1,850 lines (consolidated from 9 files, ~2,400 lines)
```

---

## ✨ Documentation Improvements

### Before Consolidation (9 files)
- ❌ Duplicate content across multiple files
- ❌ Mixed languages (Korean/English)
- ❌ Unclear hierarchy
- ❌ Fragmented information

### After Consolidation (5 files)
- ✅ Single comprehensive guide
- ✅ All English
- ✅ Clear purpose per file
- ✅ Easy navigation
- ✅ No duplication

---

## 🔍 Key Constraints (Historical Reference)

### Hard Constraints
- ❌ NO modifications to glob module
- ❌ NO legacy dependencies post-migration
- ✅ CLI interface must remain unchanged

### Implementation Approach
- ✅ Adapt CLI code to glob architecture
- ✅ Use existing glob APIs only
- ✅ Minimal changes, maximum compatibility

---

## 📊 Migration Statistics

| Metric | Value |
|--------|-------|
| **Duration** | 3.5 days |
| **Files Modified** | 1 (`__main__.py`) |
| **Lines Changed** | ~200 lines |
| **glob Modifications** | 0 (constraint met) |
| **Tests Passing** | 100% |
| **Features Removed** | 1 (deps-in-workflow) |

---

## 🎓 Lessons Learned

### What Worked Well
1. **Consolidation First**: Understanding all legacy usage before coding
2. **API-First Design**: glob's clean API made migration straightforward
3. **Object-Oriented**: InstalledNodePackage simplified many operations
4. **No Glob Changes**: Constraint forced better CLI design

### Challenges Overcome
1. **show_list() Complexity**: Rewrote from scratch using new patterns
2. **Dictionary to Object**: Required rethinking data access patterns
3. **Async Handling**: Wrapped async methods appropriately
4. **Testing Without Mocks**: Relied on integration testing

---

## 📚 Related Documentation

### Project Documentation
- [Main Documentation Index](/DOCUMENTATION_INDEX.md)
- [Contributing Guidelines](/CONTRIBUTING.md)
- [Development Guidelines](/CLAUDE.md)

### Package Documentation
- [glob Module Guide](/comfyui_manager/glob/CLAUDE.md)
- [Data Models](/comfyui_manager/data_models/README.md)

---

## 🔗 Cross-References

**If you need to**:
- Understand glob APIs → [CLI_API_REFERENCE.md](CLI_API_REFERENCE.md)
- See implementation steps → [CLI_IMPLEMENTATION_CHECKLIST.md](CLI_IMPLEMENTATION_CHECKLIST.md)
- Run tests → [CLI_TESTING_GUIDE.md](CLI_TESTING_GUIDE.md)
- Understand full context → [CLI_MIGRATION_GUIDE.md](CLI_MIGRATION_GUIDE.md)

---

**Status**: ✅ Migration Complete - Documentation Archived for Reference
**Next Review**: When similar migration projects are planned
@@ -1,328 +0,0 @@
# Future Test Plans

**Type**: Planning Document (Future Tests)
**Status**: P1 tests COMPLETE ✅ - Additional scenarios remain planned
**Current Implementation Status**: See [tests/glob/README.md](../../../tests/glob/README.md)

**Last Updated**: 2025-11-06

---

## Overview

This document contains test scenarios that are **planned but not yet implemented**. For currently implemented tests, see [tests/glob/README.md](../../../tests/glob/README.md).

**Currently Implemented**: 51 tests ✅ (includes all P1 complex scenarios)
**P1 Implementation**: COMPLETE ✅ (Phase 3.1, 5.1, 5.2, 5.3, 6)
**Planned in this document**: Additional scenarios for comprehensive coverage (P0, P2)

---

## 📋 Table of Contents

1. [Simple Test Scenarios](#simple-test-scenarios) - Additional basic API tests
2. [Complex Multi-Version Scenarios](#complex-multi-version-scenarios) - Advanced state management tests
3. [Priority Matrix](#priority-matrix) - Implementation priorities

---

# Simple Test Scenarios

These are straightforward single-version/type test scenarios that extend the existing test suite.

## 3. Error Handling Testing (Priority: Medium)

### Test 3.1: Install Non-existent Package
**Purpose**: Handle invalid package names

**Steps**:
1. Attempt to install with a non-existent package ID (a pytest sketch for this case appears at the end of this section)
2. Verify appropriate error message

**Verification Items**:
- ✓ Error status returned
- ✓ Clear error message
- ✓ No server crash

### Test 3.2: Invalid Version Specification
**Purpose**: Handle non-existent version installation attempts

**Steps**:
1. Attempt to install with non-existent version (e.g., "99.99.99")
2. Verify error handling

**Verification Items**:
- ✓ Error status returned
- ✓ Clear error message

### Test 3.3: Permission Error Simulation
**Purpose**: Handle file system permission issues

**Steps**:
1. Set custom_nodes directory to read-only
2. Attempt package installation
3. Verify error handling
4. Restore permissions

**Verification Items**:
- ✓ Permission error detected
- ✓ Clear error message
- ✓ Partial installation rollback
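
A possible shape for Test 3.1, assuming the `api_client` fixture pattern referenced under Implementation Notes. The endpoint path, payload field, and response shape are placeholders, not the actual Manager API; adapt them before implementing.

```python
# Hedged pytest sketch for Test 3.1. The endpoint and response shape are
# assumptions; adapt them to the real api_client fixture and server API.
def test_install_nonexistent_package(api_client):
    response = api_client.post(
        "/manager/queue/install",                       # assumed endpoint
        json={"id": "definitely-not-a-real-package"},   # assumed payload field
    )
    # The server must report an error without crashing.
    assert response.status_code >= 400 or response.json().get("status") == "error"
```
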
---

## 4. Dependency Management Testing (Priority: Medium)

### Test 4.1: Installation with Dependencies
**Purpose**: Automatic installation of dependencies from packages with requirements.txt

**Steps**:
1. Install package with dependencies
2. Verify requirements.txt processing
3. Verify dependency packages installed

**Verification Items**:
- ✓ requirements.txt executed
- ✓ Dependency packages installed
- ✓ Installation scripts executed

### Test 4.2: no_deps Flag Testing
**Purpose**: Verify option to skip dependency installation

**Steps**:
1. Install package with no_deps=true (see the sketch after this section)
2. Verify requirements.txt skipped
3. Verify installation scripts skipped

**Verification Items**:
- ✓ Dependency installation skipped
- ✓ Only package files installed
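
Test 4.2 could be expressed in the same style. The endpoint and the `no_deps` payload field mirror the flag described above but are assumptions; the "requirements skipped" check is approximated by confirming that the pip environment is unchanged.

```python
# Hedged pytest sketch for Test 4.2 (skip dependency installation).
# Endpoint and payload field names are assumptions, not the real API.
import importlib.metadata

def installed_pip_names() -> set:
    return {dist.metadata["Name"] for dist in importlib.metadata.distributions()}

def test_install_with_no_deps(api_client):
    before = installed_pip_names()
    response = api_client.post(
        "/manager/queue/install",                       # assumed endpoint
        json={"id": "package-with-requirements", "no_deps": True},
    )
    assert response.status_code == 200
    # With no_deps, the pip environment should be unchanged.
    assert installed_pip_names() == before
```
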
---

## 5. Multi-package Management Testing (Priority: Medium)

### Test 5.1: Concurrent Multiple Package Installation
**Purpose**: Concurrent installation of multiple independent packages

**Steps**:
1. Add 3 different packages to queue
2. Start queue
3. Verify all packages installed

**Verification Items**:
- ✓ All packages installed successfully
- ✓ Installation order guaranteed
- ✓ Individual failures don't affect other packages

### Test 5.2: Same Package Concurrent Installation (Conflict Handling)
**Purpose**: Handle concurrent requests for same package

**Steps**:
1. Add same package to queue twice
2. Start queue
3. Verify duplicate handling

**Verification Items**:
- ✓ First installation successful
- ✓ Second request skipped
- ✓ Handled without errors

---

## 6. Security Level Testing (Priority: Low)

### Test 6.1: Installation Restrictions by Security Level
**Purpose**: Allow/deny installation based on security_level settings

**Steps**:
1. Set security_level to "strong"
2. Attempt installation with non-CNR registered URL
3. Verify rejection

**Verification Items**:
- ✓ Security level validation
- ✓ Appropriate error message

---

# Complex Multi-Version Scenarios

These scenarios test complex interactions between multiple versions and types of the same package.

## Test Philosophy

### Real-World Scenarios
1. User switches from Nightly to CNR
2. Install both CNR and Nightly, activate only one
3. Keep multiple versions in .disabled/ and switch as needed
4. Other versions exist in disabled state during Update

---

## Phase 7: Complex Version Switch Chains (Priority: High)

### Test 7.1: CNR Old Enabled → CNR New (Other Versions Disabled)
**Initial State:**
```
custom_nodes/:
└── ComfyUI_SigmoidOffsetScheduler/ (CNR 1.0.1)
.disabled/:
├── ComfyUI_SigmoidOffsetScheduler_1.0.0/
└── ComfyUI_SigmoidOffsetScheduler_nightly/
```

**Operation:** Install CNR v1.0.2 (version switch)

**Expected Result:**
```
custom_nodes/:
└── ComfyUI_SigmoidOffsetScheduler/ (CNR 1.0.2)
.disabled/:
├── ComfyUI_SigmoidOffsetScheduler_1.0.0/
├── ComfyUI_SigmoidOffsetScheduler_1.0.1/ (old enabled version)
└── ComfyUI_SigmoidOffsetScheduler_nightly/
```

**Verification Items:**
- ✓ Existing enabled version auto-disabled
- ✓ New version enabled
- ✓ All disabled versions maintained
- ✓ Version history managed (see the sketch below)
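
Expressed with the `verify_version_state` pattern from the Implementation Notes at the end of this document, Test 7.1's expected end state might look like the snippet below. It reuses the `custom_nodes_path` fixture and the helper described later; both are sketches, not shipped utilities.

```python
# Test 7.1 end state expressed with the verify_version_state pattern
# described under Implementation Notes (helper and fixture defined there).
expected_state = {
    'enabled': {'type': 'cnr', 'version': '1.0.2'},
    'disabled': [
        {'type': 'cnr', 'version': '1.0.0'},
        {'type': 'cnr', 'version': '1.0.1'},   # previously enabled version
        {'type': 'nightly'},
    ],
}
verify_version_state(custom_nodes_path, expected_state)
```
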
### Test 7.2: Version Switch Chain (Nightly → CNR Old → CNR New)
**Scenario:** Sequential version transitions

**Step 1:** Nightly enabled
**Step 2:** Switch to CNR 1.0.1
**Step 3:** Switch to CNR 1.0.2

**Verification Items:**
- ✓ Each transition step operates normally
- ✓ Version history accumulates
- ✓ Rollback-capable state maintained

---

## Phase 8: Edge Cases & Error Scenarios (Priority: Medium)

### Test 8.1: Corrupted Package in .disabled/
**Situation:** Corrupted package exists in .disabled/

**Operation:** Attempt Enable

**Expected Result:**
- Clear error message
- Fallback to other version (if possible)
- System stability maintained

### Test 8.2: Name Collision in .disabled/
**Situation:** Package with same name already exists in .disabled/

**Operation:** Attempt Disable

**Expected Result:**
- Generate unique name (timestamp, etc.)
- No data loss
- All versions distinguishable

### Test 8.3: Enable Non-existent Version
**Situation:** Requested version not in .disabled/

**Operation:** Enable specific version

**Expected Result:**
- Clear error message
- Available version list provided
- Graceful failure

---

# Priority Matrix

**Note**: Phases 3, 4, 5, and 6 are now complete and documented in [tests/glob/README.md](../../../tests/glob/README.md). This matrix shows only planned future tests.

| Phase | Scenario | Priority | Status | Complexity | Real-World Frequency |
|-------|----------|----------|--------|------------|----------------------|
| 7 | Complex Version Switch Chains | P0 | 🔄 PARTIAL | High | High |
| 8 | Edge Cases & Error Scenarios | P2 | ⏳ PLANNED | High | Low |
| Simple | Error Handling (3.1-3.3) | P2 | ⏳ PLANNED | Medium | Medium |
| Simple | Dependency Management (4.1-4.2) | P2 | ⏳ PLANNED | Medium | Medium |
| Simple | Multi-package Management (5.1-5.2) | P2 | ⏳ PLANNED | Medium | Low |
| Simple | Security Level Testing (6.1) | P2 | ⏳ PLANNED | Low | Low |

**Priority Definitions:**
- **P0:** High priority (implement next) - Phase 7 Complex Version Switch
- **P1:** Medium priority - ✅ **ALL COMPLETE** (Phase 3, 4, 5, 6 - see tests/glob/README.md)
- **P2:** Low priority (implement as needed) - Simple tests and Phase 8

**Status Definitions:**
- 🔄 PARTIAL: Some tests implemented (Phase 7 has version switching tests in test_version_switching_comprehensive.py)
- ⏳ PLANNED: Not yet started

**Recommended Next Steps:**
1. **Phase 7 Remaining Tests** (P0) - Complex version switch chains with multiple disabled versions
2. **Simple Test Scenarios** (P2) - Error handling, dependency management, multi-package operations
3. **Phase 8** (P2) - Edge cases and error scenarios

---

# Implementation Notes

## Fixture Patterns

For multi-version tests, use these fixture patterns:

```python
@pytest.fixture
def setup_multi_disabled_cnr_and_nightly(api_client, custom_nodes_path):
    """
    Install both CNR and Nightly in disabled state.

    Pattern:
    1. Install CNR → custom_nodes/
    2. Disable CNR → .disabled/comfyui_sigmoidoffsetscheduler@1_0_2
    3. Install Nightly → custom_nodes/
    4. Disable Nightly → .disabled/comfyui_sigmoidoffsetscheduler@nightly
    """
    # Implementation details in archived COMPLEX_SCENARIOS_TEST_PLAN.md
```

## Verification Helpers

Use these verification patterns:

```python
def verify_version_state(custom_nodes_path, expected_state):
    """
    Verify package state matches expectations.

    expected_state = {
        'enabled': {'type': 'cnr' | 'nightly' | None, 'version': '1.0.2'},
        'disabled': [
            {'type': 'cnr', 'version': '1.0.1'},
            {'type': 'nightly'}
        ]
    }
    """
    # Implementation details in archived COMPLEX_SCENARIOS_TEST_PLAN.md
```
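
Pending the archived implementation, a minimal version of this helper can be derived from the directory layouts used in Phase 7: one enabled copy under `custom_nodes/` and any number of `.disabled/` entries for the same package. The sketch below assumes those naming conventions and the example scheduler package; it is not the archived code.

```python
# Minimal sketch of verify_version_state for the Phase 7 scenarios.
# Directory-name conventions and the example package name are taken from
# the layouts above; the real (archived) implementation may differ.
import os

PACKAGE = "comfyui_sigmoidoffsetscheduler"   # example package used in Phase 7

def _entries(path):
    return os.listdir(path) if os.path.isdir(path) else []

def verify_version_state(custom_nodes_path, expected_state):
    enabled_dirs = [d for d in _entries(custom_nodes_path)
                    if d.lower().startswith(PACKAGE)]
    disabled_dirs = [d for d in _entries(os.path.join(custom_nodes_path, ".disabled"))
                     if d.lower().startswith(PACKAGE)]

    if expected_state.get('enabled') is None:
        assert not enabled_dirs, f"unexpected enabled copies: {enabled_dirs}"
    else:
        assert len(enabled_dirs) == 1, f"expected one enabled copy, got {enabled_dirs}"

    expected_disabled = expected_state.get('disabled', [])
    assert len(disabled_dirs) == len(expected_disabled), (
        f"expected {len(expected_disabled)} disabled copies, found {disabled_dirs}")
```
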
---

# References

## Archived Implementation Guides

Detailed implementation examples, code snippets, and fixtures are available in archived planning documents:
- `.claude/archive/docs_2025-11-04/COMPLEX_SCENARIOS_TEST_PLAN.md` - Complete implementation guide with code examples
- `.claude/archive/docs_2025-11-04/TEST_PLAN_ADDITIONAL.md` - Simple test scenarios

## Current Implementation

For currently implemented tests and their status:
- **[tests/glob/README.md](../../../tests/glob/README.md)** - Current test status and coverage

---

**End of Future Test Plans**
File diff suppressed because it is too large
@@ -15,12 +15,9 @@ comfy_path = os.environ.get('COMFYUI_PATH')
git_exe_path = os.environ.get('GIT_EXE_PATH')

if comfy_path is None:
print("git_helper: environment variable 'COMFYUI_PATH' is not specified.")
print("\nWARN: The `COMFYUI_PATH` environment variable is not set. Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.", file=sys.stderr)
exit(-1)
comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))

if not os.path.exists(os.path.join(comfy_path, 'folder_paths.py')):
print("git_helper: '{comfy_path}' is not a valid 'COMFYUI_PATH' location.")
exit(-1)

def download_url(url, dest_folder, filename=None):
# Ensure the destination folder exists
@@ -156,27 +153,27 @@ def switch_to_default_branch(repo):
default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
repo.git.checkout(default_branch)
return True
except Exception:
except:
# try checkout master
# try checkout main if failed
try:
repo.git.checkout(repo.heads.master)
return True
except Exception:
except:
try:
if remote_name is not None:
repo.git.checkout('-b', 'master', f'{remote_name}/master')
return True
except Exception:
except:
try:
repo.git.checkout(repo.heads.main)
return True
except Exception:
except:
try:
if remote_name is not None:
repo.git.checkout('-b', 'main', f'{remote_name}/main')
return True
except Exception:
except:
pass

print("[ComfyUI Manager] Failed to switch to the default branch")
@@ -447,7 +444,7 @@ def restore_pip_snapshot(pips, options):
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install'] + non_url)
except Exception:
except:
pass

# fallback
@@ -456,7 +453,7 @@ def restore_pip_snapshot(pips, options):
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
except Exception:
except:
pass

if res != 0:
@@ -467,7 +464,7 @@ def restore_pip_snapshot(pips, options):
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
except Exception:
except:
pass

if res != 0:
@@ -478,7 +475,7 @@ def restore_pip_snapshot(pips, options):
res = 1
try:
res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x])
except Exception:
except:
pass

if res != 0:
File diff suppressed because it is too large
@@ -2,16 +2,20 @@

This directory contains the Python backend modules that power ComfyUI-Manager, handling the core functionality of node management, downloading, security, and server operations.

## Directory Structure
- **glob/** - code for new cacheless ComfyUI-Manager
- **legacy/** - code for legacy ComfyUI-Manager

## Core Modules

- **manager_core.py**: The central implementation of management functions, handling configuration, installation, updates, and node management.
- **manager_server.py**: Implements server functionality and API endpoints for the web interface to interact with the backend.
- **manager_downloader.py**: Handles downloading operations for models, extensions, and other resources.
- **manager_util.py**: Provides utility functions used throughout the system.

## Specialized Modules

- **cm_global.py**: Maintains global variables and state management across the system.
- **cnr_utils.py**: Helper utilities for interacting with the custom node registry (CNR).
- **git_utils.py**: Git-specific utilities for repository operations.
- **node_package.py**: Handles the packaging and installation of node extensions.
- **security_check.py**: Implements the multi-level security system for installation safety.
- **share_3rdparty.py**: Manages integration with third-party sharing platforms.

## Architecture
@@ -34,11 +34,6 @@ variables = {}
APIs = {}


pip_overrides = {}
pip_blacklist = {}
pip_downgrade_blacklist = {}


def register_api(k, f):
global APIs
APIs[k] = f
@@ -6,16 +6,10 @@ import time
|
|||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
from . import context
|
import manager_core
|
||||||
from . import manager_util
|
import manager_util
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
import toml
|
import toml
|
||||||
import logging
|
|
||||||
from . import git_utils
|
|
||||||
from cachetools import TTLCache, cached
|
|
||||||
|
|
||||||
query_ttl_cache = TTLCache(maxsize=100, ttl=60)
|
|
||||||
|
|
||||||
base_url = "https://api.comfy.org"
|
base_url = "https://api.comfy.org"
|
||||||
|
|
||||||
@@ -24,34 +18,11 @@ lock = asyncio.Lock()
|
|||||||
|
|
||||||
is_cache_loading = False
|
is_cache_loading = False
|
||||||
|
|
||||||
|
|
||||||
def normalize_package_name(name: str) -> str:
|
|
||||||
"""
|
|
||||||
Normalize package name for case-insensitive matching.
|
|
||||||
|
|
||||||
This follows the same normalization pattern used throughout CNR:
|
|
||||||
- Strip leading/trailing whitespace
|
|
||||||
- Convert to lowercase
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name: Package name to normalize (e.g., "ComfyUI_SigmoidOffsetScheduler" or " NodeName ")
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Normalized package name (e.g., "comfyui_sigmoidoffsetscheduler")
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
>>> normalize_package_name("ComfyUI_SigmoidOffsetScheduler")
|
|
||||||
"comfyui_sigmoidoffsetscheduler"
|
|
||||||
>>> normalize_package_name(" NodeName ")
|
|
||||||
"nodename"
|
|
||||||
"""
|
|
||||||
return name.strip().lower()
|
|
||||||
|
|
||||||
async def get_cnr_data(cache_mode=True, dont_wait=True):
|
async def get_cnr_data(cache_mode=True, dont_wait=True):
|
||||||
try:
|
try:
|
||||||
return await _get_cnr_data(cache_mode, dont_wait)
|
return await _get_cnr_data(cache_mode, dont_wait)
|
||||||
except asyncio.TimeoutError:
|
except asyncio.TimeoutError:
|
||||||
logging.info("A timeout occurred during the fetch process from ComfyRegistry.")
|
print("A timeout occurred during the fetch process from ComfyRegistry.")
|
||||||
return await _get_cnr_data(cache_mode=True, dont_wait=True) # timeout fallback
|
return await _get_cnr_data(cache_mode=True, dont_wait=True) # timeout fallback
|
||||||
|
|
||||||
async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
||||||
@@ -64,6 +35,7 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
page = 1
|
page = 1
|
||||||
|
|
||||||
full_nodes = {}
|
full_nodes = {}
|
||||||
|
|
||||||
|
|
||||||
# Determine form factor based on environment and platform
|
# Determine form factor based on environment and platform
|
||||||
is_desktop = bool(os.environ.get('__COMFYUI_DESKTOP_VERSION__'))
|
is_desktop = bool(os.environ.get('__COMFYUI_DESKTOP_VERSION__'))
|
||||||
@@ -75,9 +47,9 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
# Get ComfyUI version tag
|
# Get ComfyUI version tag
|
||||||
if is_desktop:
|
if is_desktop:
|
||||||
# extract version from pyproject.toml instead of git tag
|
# extract version from pyproject.toml instead of git tag
|
||||||
comfyui_ver = context.get_current_comfyui_ver() or 'unknown'
|
comfyui_ver = manager_core.get_current_comfyui_ver() or 'unknown'
|
||||||
else:
|
else:
|
||||||
comfyui_ver = context.get_comfyui_tag() or 'unknown'
|
comfyui_ver = manager_core.get_comfyui_tag() or 'unknown'
|
||||||
|
|
||||||
if is_desktop:
|
if is_desktop:
|
||||||
if is_windows:
|
if is_windows:
|
||||||
@@ -106,12 +78,12 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
full_nodes[x['id']] = x
|
full_nodes[x['id']] = x
|
||||||
|
|
||||||
if page % 5 == 0:
|
if page % 5 == 0:
|
||||||
logging.info(f"FETCH ComfyRegistry Data: {page}/{sub_json_obj['totalPages']}")
|
print(f"FETCH ComfyRegistry Data: {page}/{sub_json_obj['totalPages']}")
|
||||||
|
|
||||||
page += 1
|
page += 1
|
||||||
time.sleep(0.5)
|
time.sleep(0.5)
|
||||||
|
|
||||||
logging.info("FETCH ComfyRegistry Data [DONE]")
|
print("FETCH ComfyRegistry Data [DONE]")
|
||||||
|
|
||||||
for v in full_nodes.values():
|
for v in full_nodes.values():
|
||||||
if 'latest_version' not in v:
|
if 'latest_version' not in v:
|
||||||
@@ -127,7 +99,7 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
if cache_state == 'not-cached':
|
if cache_state == 'not-cached':
|
||||||
return {}
|
return {}
|
||||||
else:
|
else:
|
||||||
logging.info("[ComfyUI-Manager] The ComfyRegistry cache update is still in progress, so an outdated cache is being used.")
|
print("[ComfyUI-Manager] The ComfyRegistry cache update is still in progress, so an outdated cache is being used.")
|
||||||
with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
|
with open(manager_util.get_cache_path(uri), 'r', encoding="UTF-8", errors="ignore") as json_file:
|
||||||
return json.load(json_file)['nodes']
|
return json.load(json_file)['nodes']
|
||||||
|
|
||||||
@@ -139,9 +111,9 @@ async def _get_cnr_data(cache_mode=True, dont_wait=True):
|
|||||||
json_obj = await fetch_all()
|
json_obj = await fetch_all()
|
||||||
manager_util.save_to_cache(uri, json_obj)
|
manager_util.save_to_cache(uri, json_obj)
|
||||||
return json_obj['nodes']
|
return json_obj['nodes']
|
||||||
except Exception:
|
except:
|
||||||
res = {}
|
res = {}
|
||||||
logging.warning("Cannot connect to comfyregistry.")
|
print("Cannot connect to comfyregistry.")
|
||||||
finally:
|
finally:
|
||||||
if cache_mode:
|
if cache_mode:
|
||||||
is_cache_loading = False
|
is_cache_loading = False
|
||||||
@@ -164,7 +136,7 @@ def map_node_version(api_node_version):
|
|||||||
Maps node version data from API response to NodeVersion dataclass.
|
Maps node version data from API response to NodeVersion dataclass.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
api_node_version (dict): The 'node_version' part of the API response.
|
api_data (dict): The 'node_version' part of the API response.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
NodeVersion: An instance of NodeVersion dataclass populated with data from the API.
|
NodeVersion: An instance of NodeVersion dataclass populated with data from the API.
|
||||||
@@ -215,80 +187,6 @@ def install_node(node_id, version=None):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@cached(query_ttl_cache)
|
|
||||||
def get_nodepack(packname):
|
|
||||||
"""
|
|
||||||
Retrieves the nodepack
|
|
||||||
|
|
||||||
Args:
|
|
||||||
packname (str): The unique identifier of the node.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
nodepack info {id, latest_version}
|
|
||||||
"""
|
|
||||||
url = f"{base_url}/nodes/{packname}"
|
|
||||||
|
|
||||||
response = requests.get(url, verify=not manager_util.bypass_ssl)
|
|
||||||
if response.status_code == 200:
|
|
||||||
info = response.json()
|
|
||||||
|
|
||||||
res = {
|
|
||||||
'id': info['id']
|
|
||||||
}
|
|
||||||
|
|
||||||
if 'latest_version' in info:
|
|
||||||
res['latest_version'] = info['latest_version']['version']
|
|
||||||
|
|
||||||
if 'repository' in info:
|
|
||||||
res['repository'] = info['repository']
|
|
||||||
|
|
||||||
return res
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
@cached(query_ttl_cache)
|
|
||||||
def get_nodepack_by_url(url):
|
|
||||||
"""
|
|
||||||
Retrieves the nodepack info for installation.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
url (str): The unique identifier of the node.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
NodeVersion: Node version data or error message.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# example query: https://api.comfy.org/nodes/search?repository_url_search=ltdrdata/ComfyUI-Impact-Pack&limit=1
|
|
||||||
url = f"nodes/search?repository_url_search={url}&limit=1"
|
|
||||||
|
|
||||||
response = requests.get(url, verify=not manager_util.bypass_ssl)
|
|
||||||
if response.status_code == 200:
|
|
||||||
# Convert the API response to a NodeVersion object
|
|
||||||
info = response.json().get('nodes', [])
|
|
||||||
if len(info) > 0:
|
|
||||||
info = info[0]
|
|
||||||
repo_url = info['repository']
|
|
||||||
|
|
||||||
if git_utils.compact_url(url) != git_utils.compact_url(repo_url):
|
|
||||||
return None
|
|
||||||
|
|
||||||
res = {
|
|
||||||
'id': info['id']
|
|
||||||
}
|
|
||||||
|
|
||||||
if 'latest_version' in info:
|
|
||||||
res['latest_version'] = info['latest_version']['version']
|
|
||||||
|
|
||||||
res['repository'] = info['repository']
|
|
||||||
|
|
||||||
return res
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def all_versions_of_node(node_id):
|
def all_versions_of_node(node_id):
|
||||||
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
|
url = f"{base_url}/nodes/{node_id}/versions?statuses=NodeVersionStatusActive&statuses=NodeVersionStatusPending"
|
||||||
|
|
||||||
@@ -311,7 +209,7 @@ def read_cnr_info(fullpath):
|
|||||||
data = toml.load(f)
|
data = toml.load(f)
|
||||||
|
|
||||||
project = data.get('project', {})
|
project = data.get('project', {})
|
||||||
name = project.get('name').strip()
|
name = project.get('name').strip().lower()
|
||||||
|
|
||||||
# normalize version
|
# normalize version
|
||||||
# for example: 2.5 -> 2.5.0
|
# for example: 2.5 -> 2.5.0
|
||||||
@@ -338,8 +236,8 @@ def generate_cnr_id(fullpath, cnr_id):
|
|||||||
if not os.path.exists(cnr_id_path):
|
if not os.path.exists(cnr_id_path):
|
||||||
with open(cnr_id_path, "w") as f:
|
with open(cnr_id_path, "w") as f:
|
||||||
return f.write(cnr_id)
|
return f.write(cnr_id)
|
||||||
except Exception:
|
except:
|
||||||
logging.error(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
|
print(f"[ComfyUI Manager] unable to create file: {cnr_id_path}")
|
||||||
|
|
||||||
|
|
||||||
def read_cnr_id(fullpath):
|
def read_cnr_id(fullpath):
|
||||||
@@ -348,7 +246,8 @@ def read_cnr_id(fullpath):
|
|||||||
if os.path.exists(cnr_id_path):
|
if os.path.exists(cnr_id_path):
|
||||||
with open(cnr_id_path) as f:
|
with open(cnr_id_path) as f:
|
||||||
return f.read().strip()
|
return f.read().strip()
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -77,14 +77,6 @@ def normalize_to_github_id(url) -> str:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def compact_url(url):
|
|
||||||
github_id = normalize_to_github_id(url)
|
|
||||||
if github_id is not None:
|
|
||||||
return github_id
|
|
||||||
|
|
||||||
return url
|
|
||||||
|
|
||||||
|
|
||||||
def get_url_for_clone(url):
|
def get_url_for_clone(url):
|
||||||
url = normalize_url(url)
|
url = normalize_url(url)
|
||||||
|
|
||||||
@@ -23,6 +23,7 @@ import yaml
|
|||||||
import zipfile
|
import zipfile
|
||||||
import traceback
|
import traceback
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
import toml
|
||||||
|
|
||||||
orig_print = print
|
orig_print = print
|
||||||
|
|
||||||
@@ -31,21 +32,22 @@ from packaging import version
|
|||||||
|
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from ..common import cm_global
|
glob_path = os.path.join(os.path.dirname(__file__)) # ComfyUI-Manager/glob
|
||||||
from ..common import cnr_utils
|
sys.path.append(glob_path)
|
||||||
from ..common import manager_util
|
|
||||||
from ..common import git_utils
|
import cm_global
|
||||||
from ..common import manager_downloader
|
import cnr_utils
|
||||||
from ..common.node_package import InstalledNodePackage
|
import manager_util
|
||||||
from ..common.enums import NetworkMode, SecurityLevel, DBMode
|
import git_utils
|
||||||
from ..common import context
|
import manager_downloader
|
||||||
|
from node_package import InstalledNodePackage
|
||||||
|
|
||||||
|
|
||||||
version_code = [5, 0]
|
version_code = [3, 37]
|
||||||
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
|
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_CHANNEL = "https://raw.githubusercontent.com/Comfy-Org/ComfyUI-Manager/main"
|
DEFAULT_CHANNEL = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main"
|
||||||
|
|
||||||
|
|
||||||
default_custom_nodes_path = None
|
default_custom_nodes_path = None
|
||||||
@@ -56,14 +58,13 @@ class InvalidChannel(Exception):
|
|||||||
self.channel = channel
|
self.channel = channel
|
||||||
super().__init__(channel)
|
super().__init__(channel)
|
||||||
|
|
||||||
|
|
||||||
def get_default_custom_nodes_path():
|
def get_default_custom_nodes_path():
|
||||||
global default_custom_nodes_path
|
global default_custom_nodes_path
|
||||||
if default_custom_nodes_path is None:
|
if default_custom_nodes_path is None:
|
||||||
try:
|
try:
|
||||||
import folder_paths
|
import folder_paths
|
||||||
default_custom_nodes_path = folder_paths.get_folder_paths("custom_nodes")[0]
|
default_custom_nodes_path = folder_paths.get_folder_paths("custom_nodes")[0]
|
||||||
except Exception:
|
except:
|
||||||
default_custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
default_custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
||||||
|
|
||||||
return default_custom_nodes_path
|
return default_custom_nodes_path
|
||||||
@@ -73,11 +74,37 @@ def get_custom_nodes_paths():
|
|||||||
try:
|
try:
|
||||||
import folder_paths
|
import folder_paths
|
||||||
return folder_paths.get_folder_paths("custom_nodes")
|
return folder_paths.get_folder_paths("custom_nodes")
|
||||||
except Exception:
|
except:
|
||||||
custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
custom_nodes_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..'))
|
||||||
return [custom_nodes_path]
|
return [custom_nodes_path]
|
||||||
|
|
||||||
|
|
||||||
|
def get_comfyui_tag():
|
||||||
|
try:
|
||||||
|
repo = git.Repo(comfy_path)
|
||||||
|
return repo.git.describe('--tags')
|
||||||
|
except:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_comfyui_ver():
|
||||||
|
"""
|
||||||
|
Extract version from pyproject.toml
|
||||||
|
"""
|
||||||
|
toml_path = os.path.join(comfy_path, 'pyproject.toml')
|
||||||
|
if not os.path.exists(toml_path):
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
with open(toml_path, "r", encoding="utf-8") as f:
|
||||||
|
data = toml.load(f)
|
||||||
|
|
||||||
|
project = data.get('project', {})
|
||||||
|
return project.get('version')
|
||||||
|
except:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_script_env():
|
def get_script_env():
|
||||||
new_env = os.environ.copy()
|
new_env = os.environ.copy()
|
||||||
git_exe = get_config().get('git_exe')
|
git_exe = get_config().get('git_exe')
|
||||||
@@ -85,10 +112,10 @@ def get_script_env():
|
|||||||
new_env['GIT_EXE_PATH'] = git_exe
|
new_env['GIT_EXE_PATH'] = git_exe
|
||||||
|
|
||||||
if 'COMFYUI_PATH' not in new_env:
|
if 'COMFYUI_PATH' not in new_env:
|
||||||
new_env['COMFYUI_PATH'] = context.comfy_path
|
new_env['COMFYUI_PATH'] = comfy_path
|
||||||
|
|
||||||
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
|
if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
|
||||||
new_env['COMFYUI_FOLDERS_BASE_PATH'] = context.comfy_path
|
new_env['COMFYUI_FOLDERS_BASE_PATH'] = comfy_path
|
||||||
|
|
||||||
return new_env
|
return new_env
|
||||||
|
|
||||||
@@ -110,12 +137,12 @@ def check_invalid_nodes():
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
import folder_paths
|
import folder_paths
|
||||||
except Exception:
|
except:
|
||||||
try:
|
try:
|
||||||
sys.path.append(context.comfy_path)
|
sys.path.append(comfy_path)
|
||||||
import folder_paths
|
import folder_paths
|
||||||
except Exception:
|
except:
|
||||||
raise Exception(f"Invalid COMFYUI_FOLDERS_BASE_PATH: {context.comfy_path}")
|
raise Exception(f"Invalid COMFYUI_FOLDERS_BASE_PATH: {comfy_path}")
|
||||||
|
|
||||||
def check(root):
|
def check(root):
|
||||||
global invalid_nodes
|
global invalid_nodes
|
||||||
@@ -150,6 +177,75 @@ def check_invalid_nodes():
     print("\n---------------------------------------------------------------------------\n")


+# read env vars
+comfy_path: str = os.environ.get('COMFYUI_PATH')
+comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
+
+if comfy_path is None:
+    try:
+        import folder_paths
+        comfy_path = os.path.join(os.path.dirname(folder_paths.__file__))
+    except:
+        comfy_path = os.path.abspath(os.path.join(manager_util.comfyui_manager_path, '..', '..'))
+
+if comfy_base_path is None:
+    comfy_base_path = comfy_path
+
+
+channel_list_template_path = os.path.join(manager_util.comfyui_manager_path, 'channels.list.template')
+git_script_path = os.path.join(manager_util.comfyui_manager_path, "git_helper.py")
+
+manager_files_path = None
+manager_config_path = None
+manager_channel_list_path = None
+manager_startup_script_path:str = None
+manager_snapshot_path = None
+manager_pip_overrides_path = None
+manager_pip_blacklist_path = None
+manager_components_path = None
+
+def update_user_directory(user_dir):
+    global manager_files_path
+    global manager_config_path
+    global manager_channel_list_path
+    global manager_startup_script_path
+    global manager_snapshot_path
+    global manager_pip_overrides_path
+    global manager_pip_blacklist_path
+    global manager_components_path
+
+    manager_files_path = os.path.abspath(os.path.join(user_dir, 'default', 'ComfyUI-Manager'))
+    if not os.path.exists(manager_files_path):
+        os.makedirs(manager_files_path)
+
+    manager_snapshot_path = os.path.join(manager_files_path, "snapshots")
+    if not os.path.exists(manager_snapshot_path):
+        os.makedirs(manager_snapshot_path)
+
+    manager_startup_script_path = os.path.join(manager_files_path, "startup-scripts")
+    if not os.path.exists(manager_startup_script_path):
+        os.makedirs(manager_startup_script_path)
+
+    manager_config_path = os.path.join(manager_files_path, 'config.ini')
+    manager_channel_list_path = os.path.join(manager_files_path, 'channels.list')
+    manager_pip_overrides_path = os.path.join(manager_files_path, "pip_overrides.json")
+    manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.list")
+    manager_components_path = os.path.join(manager_files_path, "components")
+    manager_util.cache_dir = os.path.join(manager_files_path, "cache")
+
+    if not os.path.exists(manager_util.cache_dir):
+        os.makedirs(manager_util.cache_dir)
+
+try:
+    import folder_paths
+    update_user_directory(folder_paths.get_user_directory())
+
+except Exception:
+    # fallback:
+    # This case is only possible when running with cm-cli, and in practice, this case is not actually used.
+    update_user_directory(os.path.abspath(manager_util.comfyui_manager_path))
+
+
 cached_config = None
 js_path = None
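The added block above resolves the ComfyUI base path from environment variables before falling back to the importable folder_paths module, and update_user_directory() lays out the per-user ComfyUI-Manager file tree. A minimal standalone sketch of the same resolution order (not part of the diff; DEFAULT_ROOT is a hypothetical fallback used only for illustration):

    import os

    DEFAULT_ROOT = "/opt/ComfyUI"  # hypothetical fallback, not from the patch

    def resolve_comfy_path() -> str:
        # 1) explicit override via environment variable
        path = os.environ.get('COMFYUI_PATH')
        if path:
            return path
        # 2) derive the path from the ComfyUI package importable at runtime
        try:
            import folder_paths
            return os.path.dirname(folder_paths.__file__)
        except ImportError:
            # 3) last resort (the real code walks up from the manager directory instead)
            return DEFAULT_ROOT

    print(resolve_comfy_path())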
@@ -525,7 +621,7 @@ class UnifiedManager:
             ver = str(manager_util.StrictVersion(info['version']))
             return {'id': cnr['id'], 'cnr': cnr, 'ver': ver}
         else:
-            return {'id': info['id'], 'ver': info['version']}
+            return None
     else:
         return None

@@ -701,9 +797,7 @@ class UnifiedManager:

         return latest

-    async def reload(self, cache_mode, dont_wait=True, update_cnr_map=True):
-        import folder_paths
-
+    async def reload(self, cache_mode, dont_wait=True):
         self.custom_node_map_cache = {}
         self.cnr_inactive_nodes = {}      # node_id -> node_version -> fullpath
         self.nightly_inactive_nodes = {}  # node_id -> fullpath
@@ -711,18 +805,17 @@ class UnifiedManager:
         self.unknown_active_nodes = {}    # node_id -> repo url * fullpath
         self.active_nodes = {}            # node_id -> node_version * fullpath

-        if get_config()['network_mode'] != 'public' or manager_util.is_manager_pip_package():
+        if get_config()['network_mode'] != 'public':
             dont_wait = True

-        if update_cnr_map:
-            # reload 'cnr_map' and 'repo_cnr_map'
-            cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode=='cache', dont_wait=dont_wait)
+        # reload 'cnr_map' and 'repo_cnr_map'
+        cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode=='cache', dont_wait=dont_wait)

         for x in cnrs:
             self.cnr_map[x['id']] = x
             if 'repository' in x:
                 normalized_url = git_utils.normalize_url(x['repository'])
                 self.repo_cnr_map[normalized_url] = x

         # reload node status info from custom_nodes/*
         for custom_nodes_path in folder_paths.get_folder_paths('custom_nodes'):

@@ -770,7 +863,7 @@ class UnifiedManager:
                 if 'id' in x:
                     if x['id'] not in res:
                         res[x['id']] = (x, True)
-            except Exception:
+            except:
                 logging.error(f"[ComfyUI-Manager] broken item:{x}")

         return res
@@ -823,7 +916,7 @@ class UnifiedManager:
     def safe_version(ver_str):
         try:
             return version.parse(ver_str)
-        except Exception:
+        except:
             return version.parse("0.0.0")

     def execute_install_script(self, url, repo_path, instant_execution=False, lazy_mode=False, no_deps=False):
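safe_version() above swallows malformed version strings and falls back to 0.0.0 rather than raising. A small illustration of that behaviour (a sketch assuming the surrounding module imports packaging.version as version, which is how the call reads here):

    from packaging import version

    def safe_version(ver_str):
        try:
            return version.parse(ver_str)
        except Exception:
            return version.parse("0.0.0")

    print(safe_version("1.2.3"))           # 1.2.3
    print(safe_version("not-a-version"))   # 0.0.0 instead of an InvalidVersion error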
@@ -837,14 +930,15 @@ class UnifiedManager:
         else:
             if os.path.exists(requirements_path) and not no_deps:
                 print("Install: pip packages")
-                pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
+                pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
                 lines = manager_util.robust_readlines(requirements_path)
                 for line in lines:
                     package_name = remap_pip_package(line.strip())
                     if package_name and not package_name.startswith('#') and package_name not in self.processed_install:
                         self.processed_install.add(package_name)
-                        install_cmd = manager_util.make_pip_cmd(["install", package_name])
-                        if package_name.strip() != "" and not package_name.startswith('#'):
+                        clean_package_name = package_name.split('#')[0].strip()
+                        install_cmd = manager_util.make_pip_cmd(["install", clean_package_name])
+                        if clean_package_name != "" and not clean_package_name.startswith('#'):
                             res = res and try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution)

                 pip_fixer.fix_broken()
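The new side of this hunk strips inline comments from each requirements line before building the pip command, so a line such as "numpy>=1.24  # pinned for nodepack X" no longer leaks the comment into the install command. A minimal sketch of that normalisation (the helper name below is illustrative, not from the patch):

    def clean_requirement(line: str) -> str:
        # drop an inline "# ..." comment and surrounding whitespace
        return line.split('#')[0].strip()

    assert clean_requirement("numpy>=1.24  # pinned for nodepack X") == "numpy>=1.24"
    assert clean_requirement("# a pure comment line") == ""  # empty result is skipped by the caller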
@@ -858,7 +952,7 @@ class UnifiedManager:
         return res

     def reserve_cnr_switch(self, target, zip_url, from_path, to_path, no_deps):
-        script_path = os.path.join(context.manager_startup_script_path, "install-scripts.txt")
+        script_path = os.path.join(manager_startup_script_path, "install-scripts.txt")
         with open(script_path, "a") as file:
             obj = [target, "#LAZY-CNR-SWITCH-SCRIPT", zip_url, from_path, to_path, no_deps, get_default_custom_nodes_path(), sys.executable]
             file.write(f"{obj}\n")

@@ -1264,7 +1358,7 @@ class UnifiedManager:
         print(f"Download: git clone '{clone_url}'")

         if not instant_execution and platform.system() == 'Windows':
-            res = manager_funcs.run_script([sys.executable, context.git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
+            res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
             if res != 0:
                 return result.fail(f"Failed to clone repo: {clone_url}")
         else:

@@ -1390,6 +1484,7 @@ class UnifiedManager:
             return ManagedResult('skip')
         elif self.is_disabled(node_id):
             return self.unified_enable(node_id)
+
         else:
             version_spec = self.resolve_unspecified_version(node_id)

@@ -1416,20 +1511,12 @@ class UnifiedManager:
             return self.unified_enable(node_id, version_spec)

         elif version_spec == 'unknown' or version_spec == 'nightly':
-            to_path = os.path.abspath(os.path.join(get_default_custom_nodes_path(), node_id))
-
             if version_spec == 'nightly':
                 # disable cnr nodes
                 if self.is_enabled(node_id, 'cnr'):
                     self.unified_disable(node_id, False)

-            # use `repo name` as a dir name instead of `cnr id` if system added nodepack (i.e. publisher is null)
-            cnr = self.cnr_map.get(node_id)
-
-            if cnr is not None and cnr.get('publisher') is None:
-                repo_name = os.path.basename(git_utils.normalize_url(repo_url))
-                to_path = os.path.abspath(os.path.join(get_default_custom_nodes_path(), repo_name))
-
+            to_path = os.path.abspath(os.path.join(get_default_custom_nodes_path(), node_id))
             res = self.repo_install(repo_url, to_path, instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall)
             if res.result:
                 if version_spec == 'unknown':

@@ -1490,7 +1577,7 @@ def identify_node_pack_from_path(fullpath):
     if github_id is None:
         try:
             github_id = os.path.basename(repo_url)
-        except Exception:
+        except:
             logging.warning(f"[ComfyUI-Manager] unexpected repo url: {repo_url}")
             github_id = module_name

@@ -1545,10 +1632,10 @@ def get_channel_dict():
     if channel_dict is None:
         channel_dict = {}

-        if not os.path.exists(context.manager_channel_list_path):
-            shutil.copy(context.channel_list_template_path, context.manager_channel_list_path)
+        if not os.path.exists(manager_channel_list_path):
+            shutil.copy(channel_list_template_path, manager_channel_list_path)

-        with open(context.manager_channel_list_path, 'r') as file:
+        with open(manager_channel_list_path, 'r') as file:
             channels = file.read()
             for x in channels.split('\n'):
                 channel_info = x.split("::")

@@ -1612,18 +1699,18 @@ def write_config():
        'db_mode': get_config()['db_mode'],
    }

-    directory = os.path.dirname(context.manager_config_path)
+    directory = os.path.dirname(manager_config_path)
     if not os.path.exists(directory):
         os.makedirs(directory)

-    with open(context.manager_config_path, 'w') as configfile:
+    with open(manager_config_path, 'w') as configfile:
         config.write(configfile)


 def read_config():
     try:
         config = configparser.ConfigParser(strict=False)
-        config.read(context.manager_config_path)
+        config.read(manager_config_path)
         default_conf = config['default']

         def get_bool(key, default_value):

@@ -1636,7 +1723,7 @@ def read_config():
            'http_channel_enabled': get_bool('http_channel_enabled', False),
            'preview_method': default_conf.get('preview_method', manager_funcs.get_current_preview_method()).lower(),
            'git_exe': default_conf.get('git_exe', ''),
-           'use_uv': get_bool('use_uv', True),
+           'use_uv': get_bool('use_uv', False),
            'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
            'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
            'share_option': default_conf.get('share_option', 'all').lower(),
@@ -1648,20 +1735,22 @@ def read_config():
            'model_download_by_agent': get_bool('model_download_by_agent', False),
            'downgrade_blacklist': default_conf.get('downgrade_blacklist', '').lower(),
            'always_lazy_install': get_bool('always_lazy_install', False),
-           'network_mode': default_conf.get('network_mode', NetworkMode.PUBLIC.value).lower(),
-           'security_level': default_conf.get('security_level', SecurityLevel.NORMAL.value).lower(),
-           'db_mode': default_conf.get('db_mode', DBMode.CACHE.value).lower(),
+           'network_mode': default_conf.get('network_mode', 'public').lower(),
+           'security_level': default_conf.get('security_level', 'normal').lower(),
+           'db_mode': default_conf.get('db_mode', 'cache').lower(),
        }

     except Exception:
-        manager_util.use_uv = False
+        import importlib.util
+        # temporary disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
+        manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
         manager_util.bypass_ssl = False

         return {
            'http_channel_enabled': False,
            'preview_method': manager_funcs.get_current_preview_method(),
            'git_exe': '',
-           'use_uv': True,
+           'use_uv': manager_util.use_uv,
            'channel_url': DEFAULT_CHANNEL,
            'default_cache_as_channel_url': False,
            'share_option': 'all',
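When no config file can be read, the new fallback probes whether the uv package is importable and keeps it disabled on Windows. A standalone sketch of the same probe (not tied to the rest of the module):

    import importlib.util
    import platform

    # use uv only when the package is installed and we are not on Windows
    use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
    print(f"pip backend: {'uv' if use_uv else 'pip'}")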
@@ -1673,9 +1762,9 @@ def read_config():
            'model_download_by_agent': False,
            'downgrade_blacklist': '',
            'always_lazy_install': False,
-           'network_mode': NetworkMode.PUBLIC.value,
-           'security_level': SecurityLevel.NORMAL.value,
-           'db_mode': DBMode.CACHE.value,
+           'network_mode': 'public',    # public | private | offline
+           'security_level': 'normal',  # strong | normal | normal- | weak
+           'db_mode': 'cache',          # local | cache | remote
        }


@@ -1719,27 +1808,27 @@ def switch_to_default_branch(repo):
        default_branch = repo.git.symbolic_ref(f'refs/remotes/{remote_name}/HEAD').replace(f'refs/remotes/{remote_name}/', '')
        repo.git.checkout(default_branch)
        return True
-    except Exception:
+    except:
        # try checkout master
        # try checkout main if failed
        try:
            repo.git.checkout(repo.heads.master)
            return True
-        except Exception:
+        except:
            try:
                if remote_name is not None:
                    repo.git.checkout('-b', 'master', f'{remote_name}/master')
                    return True
-            except Exception:
+            except:
                try:
                    repo.git.checkout(repo.heads.main)
                    return True
-                except Exception:
+                except:
                    try:
                        if remote_name is not None:
                            repo.git.checkout('-b', 'main', f'{remote_name}/main')
                            return True
-                    except Exception:
+                    except:
                        pass

    print("[ComfyUI Manager] Failed to switch to the default branch")
@@ -1747,10 +1836,10 @@ def switch_to_default_branch(repo):


 def reserve_script(repo_path, install_cmds):
-    if not os.path.exists(context.manager_startup_script_path):
-        os.makedirs(context.manager_startup_script_path)
+    if not os.path.exists(manager_startup_script_path):
+        os.makedirs(manager_startup_script_path)

-    script_path = os.path.join(context.manager_startup_script_path, "install-scripts.txt")
+    script_path = os.path.join(manager_startup_script_path, "install-scripts.txt")
     with open(script_path, "a") as file:
         obj = [repo_path] + install_cmds
         file.write(f"{obj}\n")

@@ -1790,7 +1879,7 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
                print(f"[WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version.")
                print("[WARN] The extension installation feature may not work properly in the current installed ComfyUI version on Windows environment.")
                print("###################################################################\n\n")
-        except Exception:
+        except:
            pass

        if code != 0:

@@ -1805,11 +1894,11 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False):
 # use subprocess to avoid file system lock by git (Windows)
 def __win_check_git_update(path, do_fetch=False, do_update=False):
     if do_fetch:
-        command = [sys.executable, context.git_script_path, "--fetch", path]
+        command = [sys.executable, git_script_path, "--fetch", path]
     elif do_update:
-        command = [sys.executable, context.git_script_path, "--pull", path]
+        command = [sys.executable, git_script_path, "--pull", path]
     else:
-        command = [sys.executable, context.git_script_path, "--check", path]
+        command = [sys.executable, git_script_path, "--check", path]

     new_env = get_script_env()
     process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=get_default_custom_nodes_path(), env=new_env)

@@ -1863,7 +1952,7 @@ def __win_check_git_update(path, do_fetch=False, do_update=False):


 def __win_check_git_pull(path):
-    command = [sys.executable, context.git_script_path, "--pull", path]
+    command = [sys.executable, git_script_path, "--pull", path]
     process = subprocess.Popen(command, env=get_script_env(), cwd=get_default_custom_nodes_path())
     process.wait()

@@ -1879,7 +1968,7 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
     else:
         if os.path.exists(requirements_path) and not no_deps:
             print("Install: pip packages")
-            pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
+            pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, manager_files_path)
             with open(requirements_path, "r") as requirements_file:
                 for line in requirements_file:
                     #handle comments
@@ -2059,6 +2148,13 @@ def is_valid_url(url):
     return False


+def extract_url_and_commit_id(s):
+    index = s.rfind('@')
+    if index == -1:
+        return (s, '')
+    else:
+        return (s[:index], s[index+1:])
+
 async def gitclone_install(url, instant_execution=False, msg_prefix='', no_deps=False):
     await unified_manager.reload('cache')
     await unified_manager.get_custom_nodes('default', 'cache')
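The added extract_url_and_commit_id() splits an optional "@&lt;commit&gt;" suffix off a clone URL so callers can pin the checkout. For example:

    print(extract_url_and_commit_id("https://github.com/foo/bar@0123abc"))
    # -> ('https://github.com/foo/bar', '0123abc')
    print(extract_url_and_commit_id("https://github.com/foo/bar"))
    # -> ('https://github.com/foo/bar', '')

Because it splits on the last '@', an ssh-style "git@..." URL without a commit suffix would also be split, so the helper is evidently intended for plain https URLs.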
@@ -2076,8 +2172,11 @@ async def gitclone_install(url, instant_execution=False, msg_prefix='', no_deps=
     cnr = unified_manager.get_cnr_by_repo(url)
     if cnr:
         cnr_id = cnr['id']
-        return await unified_manager.install_by_id(cnr_id, version_spec='nightly', channel='default', mode='cache')
+        return await unified_manager.install_by_id(cnr_id, version_spec=None, channel='default', mode='cache')
     else:
+        new_url, commit_id = extract_url_and_commit_id(url)
+        if commit_id != "":
+            url = new_url
         repo_name = os.path.splitext(os.path.basename(url))[0]

         # NOTE: Keep original name as possible if unknown node

@@ -2105,11 +2204,15 @@ async def gitclone_install(url, instant_execution=False, msg_prefix='', no_deps=
         clone_url = git_utils.get_url_for_clone(url)

         if not instant_execution and platform.system() == 'Windows':
-            res = manager_funcs.run_script([sys.executable, context.git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
+            res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", get_default_custom_nodes_path(), clone_url, repo_path], cwd=get_default_custom_nodes_path())
             if res != 0:
                 return result.fail(f"Failed to clone '{clone_url}' into '{repo_path}'")
         else:
             repo = git.Repo.clone_from(clone_url, repo_path, recursive=True, progress=GitProgress())
+            if commit_id!= "":
+                repo.git.checkout(commit_id)
+                repo.git.submodule('update', '--init', '--recursive')
+
             repo.git.clear_cache()
             repo.close()

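The non-Windows path above clones with GitPython and, when a commit id was supplied, immediately checks it out and re-syncs submodules. A minimal standalone sketch of that pinning flow (repository URL and commit below are placeholders, not values from the diff):

    import git  # GitPython

    clone_url = "https://github.com/example/some-nodepack"   # placeholder
    commit_id = "0123abc"                                     # placeholder

    repo = git.Repo.clone_from(clone_url, "some-nodepack", recursive=True)
    if commit_id != "":
        repo.git.checkout(commit_id)                           # pin to the requested commit
        repo.git.submodule('update', '--init', '--recursive')  # keep submodules consistent with it
    repo.git.clear_cache()
    repo.close()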
@@ -2172,7 +2275,7 @@ async def get_data_by_mode(mode, filename, channel_url=None):
            cache_uri = str(manager_util.simple_hash(uri))+'_'+filename
            cache_uri = os.path.join(manager_util.cache_dir, cache_uri)

-           if get_config()['network_mode'] == 'offline' or manager_util.is_manager_pip_package():
+           if get_config()['network_mode'] == 'offline':
                # offline network mode
                if os.path.exists(cache_uri):
                    json_obj = await manager_util.get_data(cache_uri)

@@ -2192,7 +2295,7 @@ async def get_data_by_mode(mode, filename, channel_url=None):
                    with open(cache_uri, "w", encoding='utf-8') as file:
                        json.dump(json_obj, file, indent=4, sort_keys=True)
    except Exception as e:
-       print(f"[ComfyUI-Manager] Due to a network error, switching to local mode.\n=> {filename} @ {channel_url}/{mode}\n=> {e}")
+       print(f"[ComfyUI-Manager] Due to a network error, switching to local mode.\n=> {filename}\n=> {e}")
        uri = os.path.join(manager_util.comfyui_manager_path, filename)
        json_obj = await manager_util.get_data(uri)

@@ -2263,7 +2366,7 @@ def gitclone_uninstall(files):
            url = url[:-1]
        try:
            for custom_nodes_dir in get_custom_nodes_paths():
-               dir_name:str = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
+               dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
                dir_path = os.path.join(custom_nodes_dir, dir_name)

                # safety check

@@ -2311,7 +2414,7 @@ def gitclone_set_active(files, is_disable):
            url = url[:-1]
        try:
            for custom_nodes_dir in get_custom_nodes_paths():
-               dir_name:str = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
+               dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
                dir_path = os.path.join(custom_nodes_dir, dir_name)

                # safety check

@@ -2408,7 +2511,7 @@ def update_to_stable_comfyui(repo_path):
    repo = git.Repo(repo_path)
    try:
        repo.git.checkout(repo.heads.master)
-   except Exception:
+   except:
        logging.error(f"[ComfyUI-Manager] Failed to checkout 'master' branch.\nrepo_path={repo_path}\nAvailable branches:")
        for branch in repo.branches:
            logging.error('\t'+branch.name)

@@ -2431,7 +2534,7 @@ def update_to_stable_comfyui(repo_path):
            logging.info(f"[ComfyUI-Manager] Updating ComfyUI: {current_tag} -> {latest_tag}")
            repo.git.checkout(latest_tag)
            return 'updated', latest_tag
-   except Exception:
+   except:
        traceback.print_exc()
        return "fail", None

@@ -2584,7 +2687,7 @@ async def get_current_snapshot(custom_nodes_only = False):
    await unified_manager.get_custom_nodes('default', 'cache')

    # Get ComfyUI hash
-   repo_path = context.comfy_path
+   repo_path = comfy_path

    comfyui_commit_hash = None
    if not custom_nodes_only:

@@ -2629,7 +2732,7 @@ async def get_current_snapshot(custom_nodes_only = False):
                    commit_hash = git_utils.get_commit_hash(fullpath)
                    url = git_utils.git_url(fullpath)
                    git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled)
-               except Exception:
+               except:
                    print(f"Failed to extract snapshots for the custom node '{path}'.")

            elif path.endswith('.py'):

@@ -2660,7 +2763,7 @@ async def save_snapshot_with_postfix(postfix, path=None, custom_nodes_only = Fal
        date_time_format = now.strftime("%Y-%m-%d_%H-%M-%S")
        file_name = f"{date_time_format}_{postfix}"

-       path = os.path.join(context.manager_snapshot_path, f"{file_name}.json")
+       path = os.path.join(manager_snapshot_path, f"{file_name}.json")
    else:
        file_name = path.replace('\\', '/').split('/')[-1]
        file_name = file_name.split('.')[-2]

@@ -2687,7 +2790,7 @@ async def extract_nodes_from_workflow(filepath, mode='local', channel_url='defau
        with open(filepath, "r", encoding="UTF-8", errors="ignore") as json_file:
            try:
                workflow = json.load(json_file)
-           except Exception:
+           except:
                print(f"Invalid workflow file: {filepath}")
                exit(-1)

@@ -2700,7 +2803,7 @@ async def extract_nodes_from_workflow(filepath, mode='local', channel_url='defau
        else:
            try:
                workflow = json.loads(img.info['workflow'])
-           except Exception:
+           except:
                print(f"This is not a valid .png file containing a ComfyUI workflow: {filepath}")
                exit(-1)

@@ -2971,7 +3074,7 @@ def populate_github_stats(node_packs, json_obj_github):
                v['stars'] = -1
                v['last_update'] = -1
                v['trust'] = False
-       except Exception:
+       except:
            logging.error(f"[ComfyUI-Manager] DB item is broken:\n{v}")


@@ -3249,12 +3352,12 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):

 def get_comfyui_versions(repo=None):
     if repo is None:
-        repo = git.Repo(context.comfy_path)
+        repo = git.Repo(comfy_path)

     try:
         remote = get_remote_name(repo)
         repo.remotes[remote].fetch()
-    except Exception:
+    except:
         logging.error("[ComfyUI-Manager] Failed to fetch ComfyUI")

     versions = [x.name for x in repo.tags if x.name.startswith('v')]

@@ -3283,7 +3386,7 @@ def get_comfyui_versions(repo=None):


 def switch_comfyui(tag):
-    repo = git.Repo(context.comfy_path)
+    repo = git.Repo(comfy_path)

     if tag == 'nightly':
         repo.git.checkout('master')

@@ -3323,5 +3426,5 @@ def repo_switch_commit(repo_path, commit_hash):

         repo.git.checkout(commit_hash)
         return True
-    except Exception:
+    except:
         return None
File diff suppressed because it is too large
@@ -19,7 +19,6 @@ from functools import lru_cache


 cache_lock = threading.Lock()
-session_lock = threading.Lock()

 comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
 cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.

@@ -27,9 +26,6 @@ cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also up
 use_uv = False
 bypass_ssl = False

-def is_manager_pip_package():
-    return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))
-
 def add_python_path_to_env():
     if platform.system() != "Windows":
         sep = ':'
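The removed is_manager_pip_package() helper decided whether the manager was running as a pip-installed package by checking for a sibling custom_nodes directory next to the package root: a source checkout lives inside ComfyUI/custom_nodes/, while a pip install does not. A rough standalone sketch of that heuristic (path derivation simplified for illustration):

    import os

    comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))  # roughly the package root

    def is_manager_pip_package() -> bool:
        # no sibling 'custom_nodes' directory -> installed from PyPI rather than checked out in-tree
        return not os.path.exists(os.path.join(comfyui_manager_path, '..', 'custom_nodes'))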
@@ -101,7 +97,7 @@ def make_pip_cmd(cmd):
 # DON'T USE StrictVersion - cannot handle pre_release version
 # try:
 #     from distutils.version import StrictVersion
-# except Exception:
+# except:
 #     print(f"[ComfyUI-Manager] 'distutils' package not found. Activating fallback mode for compatibility.")
 class StrictVersion:
     def __init__(self, version_string):

@@ -556,7 +552,7 @@ def robust_readlines(fullpath):
     try:
         with open(fullpath, "r") as f:
             return f.readlines()
-    except Exception:
+    except:
         encoding = None
         with open(fullpath, "rb") as f:
             raw_data = f.read()
@@ -3,7 +3,7 @@ from __future__ import annotations
 from dataclasses import dataclass
 import os

-from .git_utils import get_commit_hash
+from git_utils import get_commit_hash


 @dataclass

@@ -14,7 +14,6 @@ class InstalledNodePackage:
     fullpath: str
     disabled: bool
     version: str
-    repo_url: str = None # Git repository URL for nightly packages

     @property
     def is_unknown(self) -> bool:

@@ -47,8 +46,6 @@ class InstalledNodePackage:

     @staticmethod
     def from_fullpath(fullpath: str, resolve_from_path) -> InstalledNodePackage:
-        from . import git_utils
-
         parent_folder_name = os.path.basename(os.path.dirname(fullpath))
         module_name = os.path.basename(fullpath)

@@ -57,10 +54,6 @@ class InstalledNodePackage:
             disabled = True
         elif parent_folder_name == ".disabled":
             # Nodes under custom_nodes/.disabled/* are disabled
-            # Parse directory name format: packagename@version
-            # Examples:
-            #   comfyui_sigmoidoffsetscheduler@nightly → id: comfyui_sigmoidoffsetscheduler, version: nightly
-            #   comfyui_sigmoidoffsetscheduler@1_0_2 → id: comfyui_sigmoidoffsetscheduler, version: 1.0.2
             node_id = module_name
             disabled = True
         else:

@@ -68,35 +61,12 @@ class InstalledNodePackage:
             disabled = False

         info = resolve_from_path(fullpath)
-        repo_url = None
-        version_from_dirname = None
-
-        # For disabled packages, try to extract version from directory name
-        if disabled and parent_folder_name == ".disabled" and '@' in module_name:
-            parts = module_name.split('@')
-            if len(parts) == 2:
-                node_id = parts[0] # Use the normalized name from directory
-                version_from_dirname = parts[1].replace('_', '.') # Convert 1_0_2 → 1.0.2
-
         if info is None:
-            version = version_from_dirname if version_from_dirname else 'unknown'
+            version = 'unknown'
         else:
             node_id = info['id'] # robust module guessing
-            # Prefer version from directory name for disabled packages (preserves 'nightly' literal)
-            # Otherwise use version from package inspection (commit hash for git repos)
-            if version_from_dirname:
-                version = version_from_dirname
-            else:
-                version = info['ver']
-
-        # Get repository URL for both nightly and CNR packages
-        if version == 'nightly':
-            # For nightly packages, get repo URL from git
-            repo_url = git_utils.git_url(fullpath)
-        elif 'url' in info and info['url']:
-            # For CNR packages, get repo URL from pyproject.toml
-            repo_url = info['url']
+            version = info['ver']

         return InstalledNodePackage(
-            id=node_id, fullpath=fullpath, disabled=disabled, version=version, repo_url=repo_url
+            id=node_id, fullpath=fullpath, disabled=disabled, version=version
        )
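The removed block parsed disabled-package directory names of the form packagename@version, with '_' standing in for '.' in the version part. A small sketch of that naming convention, using the example names from the removed comments (the helper name is illustrative, not from the patch):

    def parse_disabled_dirname(module_name: str):
        # "comfyui_sigmoidoffsetscheduler@1_0_2"   -> ("comfyui_sigmoidoffsetscheduler", "1.0.2")
        # "comfyui_sigmoidoffsetscheduler@nightly" -> ("comfyui_sigmoidoffsetscheduler", "nightly")
        if '@' not in module_name:
            return module_name, None
        parts = module_name.split('@')
        if len(parts) != 2:
            return module_name, None
        return parts[0], parts[1].replace('_', '.')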
@@ -2,7 +2,7 @@ import sys
 import subprocess
 import os

-from . import manager_util
+import manager_util


 def security_check():
@@ -1,7 +1,5 @@
 import mimetypes
-from ..common import context
-from . import manager_core as core
+import manager_core as core

 import os
 from aiohttp import web
 import aiohttp

@@ -55,7 +53,7 @@ def compute_sha256_checksum(filepath):
     return sha256.hexdigest()


-@PromptServer.instance.routes.get("/v2/manager/share_option")
+@PromptServer.instance.routes.get("/manager/share_option")
 async def share_option(request):
     if "value" in request.rel_url.query:
         core.get_config()['share_option'] = request.rel_url.query['value']

@@ -67,21 +65,21 @@ async def share_option(request):


 def get_openart_auth():
-    if not os.path.exists(os.path.join(context.manager_files_path, ".openart_key")):
+    if not os.path.exists(os.path.join(core.manager_files_path, ".openart_key")):
         return None
     try:
-        with open(os.path.join(context.manager_files_path, ".openart_key"), "r") as f:
+        with open(os.path.join(core.manager_files_path, ".openart_key"), "r") as f:
             openart_key = f.read().strip()
             return openart_key if openart_key else None
-    except Exception:
+    except:
         return None


 def get_matrix_auth():
-    if not os.path.exists(os.path.join(context.manager_files_path, "matrix_auth")):
+    if not os.path.exists(os.path.join(core.manager_files_path, "matrix_auth")):
         return None
     try:
-        with open(os.path.join(context.manager_files_path, "matrix_auth"), "r") as f:
+        with open(os.path.join(core.manager_files_path, "matrix_auth"), "r") as f:
             matrix_auth = f.read()
             homeserver, username, password = matrix_auth.strip().split("\n")
             if not homeserver or not username or not password:

@@ -91,40 +89,40 @@ def get_matrix_auth():
                 "username": username,
                 "password": password,
             }
-    except Exception:
+    except:
         return None


 def get_comfyworkflows_auth():
-    if not os.path.exists(os.path.join(context.manager_files_path, "comfyworkflows_sharekey")):
+    if not os.path.exists(os.path.join(core.manager_files_path, "comfyworkflows_sharekey")):
         return None
     try:
-        with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
+        with open(os.path.join(core.manager_files_path, "comfyworkflows_sharekey"), "r") as f:
             share_key = f.read()
             if not share_key.strip():
                 return None
             return share_key
-    except Exception:
+    except:
         return None


 def get_youml_settings():
-    if not os.path.exists(os.path.join(context.manager_files_path, ".youml")):
+    if not os.path.exists(os.path.join(core.manager_files_path, ".youml")):
         return None
     try:
-        with open(os.path.join(context.manager_files_path, ".youml"), "r") as f:
+        with open(os.path.join(core.manager_files_path, ".youml"), "r") as f:
             youml_settings = f.read().strip()
             return youml_settings if youml_settings else None
-    except Exception:
+    except:
         return None


 def set_youml_settings(settings):
-    with open(os.path.join(context.manager_files_path, ".youml"), "w") as f:
+    with open(os.path.join(core.manager_files_path, ".youml"), "w") as f:
         f.write(settings)


-@PromptServer.instance.routes.get("/v2/manager/get_openart_auth")
+@PromptServer.instance.routes.get("/manager/get_openart_auth")
 async def api_get_openart_auth(request):
     # print("Getting stored Matrix credentials...")
     openart_key = get_openart_auth()

@@ -133,16 +131,16 @@ async def api_get_openart_auth(request):
     return web.json_response({"openart_key": openart_key})


-@PromptServer.instance.routes.post("/v2/manager/set_openart_auth")
+@PromptServer.instance.routes.post("/manager/set_openart_auth")
 async def api_set_openart_auth(request):
     json_data = await request.json()
     openart_key = json_data['openart_key']
-    with open(os.path.join(context.manager_files_path, ".openart_key"), "w") as f:
+    with open(os.path.join(core.manager_files_path, ".openart_key"), "w") as f:
         f.write(openart_key)
     return web.Response(status=200)


-@PromptServer.instance.routes.get("/v2/manager/get_matrix_auth")
+@PromptServer.instance.routes.get("/manager/get_matrix_auth")
 async def api_get_matrix_auth(request):
     # print("Getting stored Matrix credentials...")
     matrix_auth = get_matrix_auth()

@@ -151,7 +149,7 @@ async def api_get_matrix_auth(request):
     return web.json_response(matrix_auth)


-@PromptServer.instance.routes.get("/v2/manager/youml/settings")
+@PromptServer.instance.routes.get("/manager/youml/settings")
 async def api_get_youml_settings(request):
     youml_settings = get_youml_settings()
     if not youml_settings:

@@ -159,14 +157,14 @@ async def api_get_youml_settings(request):
     return web.json_response(json.loads(youml_settings))


-@PromptServer.instance.routes.post("/v2/manager/youml/settings")
+@PromptServer.instance.routes.post("/manager/youml/settings")
 async def api_set_youml_settings(request):
     json_data = await request.json()
     set_youml_settings(json.dumps(json_data))
     return web.Response(status=200)


-@PromptServer.instance.routes.get("/v2/manager/get_comfyworkflows_auth")
+@PromptServer.instance.routes.get("/manager/get_comfyworkflows_auth")
 async def api_get_comfyworkflows_auth(request):
     # Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
     # in the same directory as the ComfyUI base folder

@@ -177,17 +175,17 @@ async def api_get_comfyworkflows_auth(request):
     return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})


-@PromptServer.instance.routes.post("/v2/manager/set_esheep_workflow_and_images")
+@PromptServer.instance.routes.post("/manager/set_esheep_workflow_and_images")
 async def set_esheep_workflow_and_images(request):
     json_data = await request.json()
-    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
+    with open(os.path.join(core.manager_files_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
         json.dump(json_data, file, indent=4)
         return web.Response(status=200)


-@PromptServer.instance.routes.get("/v2/manager/get_esheep_workflow_and_images")
+@PromptServer.instance.routes.get("/manager/get_esheep_workflow_and_images")
 async def get_esheep_workflow_and_images(request):
-    with open(os.path.join(context.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
+    with open(os.path.join(core.manager_files_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
         data = json.load(file)
         return web.Response(status=200, text=json.dumps(data))

@@ -196,12 +194,12 @@ def set_matrix_auth(json_data):
     homeserver = json_data['homeserver']
     username = json_data['username']
     password = json_data['password']
-    with open(os.path.join(context.manager_files_path, "matrix_auth"), "w") as f:
+    with open(os.path.join(core.manager_files_path, "matrix_auth"), "w") as f:
         f.write("\n".join([homeserver, username, password]))


 def set_comfyworkflows_auth(comfyworkflows_sharekey):
-    with open(os.path.join(context.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
+    with open(os.path.join(core.manager_files_path, "comfyworkflows_sharekey"), "w") as f:
         f.write(comfyworkflows_sharekey)


@@ -213,7 +211,7 @@ def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
     return comfyworkflows_sharekey.strip()


-@PromptServer.instance.routes.post("/v2/manager/share")
+@PromptServer.instance.routes.post("/manager/share")
 async def share_art(request):
     # get json data
     json_data = await request.json()

@@ -235,7 +233,7 @@ async def share_art(request):

     try:
         output_to_share = potential_outputs[int(selected_output_index)]
-    except Exception:
+    except:
         # for now, pick the first output
         output_to_share = potential_outputs[0]
@@ -25,7 +25,7 @@ async function tryInstallCustomNode(event) {
     const res = await customConfirm(msg);
     if(res) {
         if(event.detail.target.installed == 'Disabled') {
-            const response = await api.fetchApi(`/v2/customnode/toggle_active`, {
+            const response = await api.fetchApi(`/customnode/toggle_active`, {
                 method: 'POST',
                 headers: { 'Content-Type': 'application/json' },
                 body: JSON.stringify(event.detail.target)

@@ -35,7 +35,7 @@ async function tryInstallCustomNode(event) {
             await sleep(300);
             app.ui.dialog.show(`Installing... '${event.detail.target.title}'`);

-            const response = await api.fetchApi(`/v2/customnode/install`, {
+            const response = await api.fetchApi(`/customnode/install`, {
                 method: 'POST',
                 headers: { 'Content-Type': 'application/json' },
                 body: JSON.stringify(event.detail.target)

@@ -52,7 +52,7 @@ async function tryInstallCustomNode(event) {
         }
     }

-    let response = await api.fetchApi("/v2/manager/reboot");
+    let response = await api.fetchApi("/manager/reboot");
    if(response.status == 403) {
        show_message('This action is not allowed with this security level configuration.');
        return false;

@@ -14,9 +14,9 @@ import { OpenArtShareDialog } from "./comfyui-share-openart.js";
 import {
     free_models, install_pip, install_via_git_url, manager_instance,
     rebootAPI, setManagerInstance, show_message, customAlert, customPrompt,
-    infoToast, showTerminal, setNeedRestart, generateUUID
+    infoToast, showTerminal, setNeedRestart
 } from "./common.js";
-import { ComponentBuilderDialog, load_components, set_component_policy } from "./components-manager.js";
+import { ComponentBuilderDialog, getPureName, load_components, set_component_policy } from "./components-manager.js";
 import { CustomNodesManager } from "./custom-nodes-manager.js";
 import { ModelManager } from "./model-manager.js";
 import { SnapshotManager } from "./snapshot.js";

@@ -189,7 +189,8 @@ docStyle.innerHTML = `
 }
 `;

-function isBeforeFrontendVersion(compareVersion) {
+function is_legacy_front() {
+    let compareVersion = '1.2.49';
     try {
         const frontendVersion = window['__COMFYUI_FRONTEND_VERSION__'];
         if (typeof frontendVersion !== 'string') {

@@ -231,7 +232,7 @@ var restart_stop_button = null;
 var update_policy_combo = null;

 let share_option = 'all';
-var batch_id = null;
+var is_updating = false;


 // copied style from https://github.com/pythongosssss/ComfyUI-Custom-Scripts

@@ -414,7 +415,7 @@ const style = `
 `;

 async function init_share_option() {
-    api.fetchApi('/v2/manager/share_option')
+    api.fetchApi('/manager/share_option')
        .then(response => response.text())
        .then(data => {
            share_option = data || 'all';

@@ -422,7 +423,7 @@ async function init_share_option() {
 }

 async function init_notice(notice) {
-    api.fetchApi('/v2/manager/notice')
+    api.fetchApi('/manager/notice')
        .then(response => response.text())
        .then(data => {
            notice.innerHTML = data;

@@ -473,19 +474,14 @@ async function updateComfyUI() {
     let prev_text = update_comfyui_button.innerText;
     update_comfyui_button.innerText = "Updating ComfyUI...";

-    // set_inprogress_mode();
+    set_inprogress_mode();
+
+    const response = await api.fetchApi('/manager/queue/update_comfyui');

     showTerminal();

-    batch_id = generateUUID();
-    let batch = {};
-    batch['batch_id'] = batch_id;
-    batch['update_comfyui'] = true;
-
-    const res = await api.fetchApi(`/v2/manager/queue/batch`, {
-        method: 'POST',
-        body: JSON.stringify(batch)
-    });
+    is_updating = true;
+    await api.fetchApi('/manager/queue/start');
 }

 function showVersionSelectorDialog(versions, current, onSelect) {

@@ -616,7 +612,7 @@ async function switchComfyUI() {
     switch_comfyui_button.disabled = true;
     switch_comfyui_button.style.backgroundColor = "gray";

-    let res = await api.fetchApi(`/v2/comfyui_manager/comfyui_versions`, { cache: "no-store" });
+    let res = await api.fetchApi(`/comfyui_manager/comfyui_versions`, { cache: "no-store" });

     switch_comfyui_button.disabled = false;
     switch_comfyui_button.style.backgroundColor = "";

@@ -635,14 +631,14 @@ async function switchComfyUI() {
     showVersionSelectorDialog(versions, obj.current, async (selected_version) => {
         if(selected_version == 'nightly') {
             update_policy_combo.value = 'nightly-comfyui';
-            api.fetchApi('/v2/manager/policy/update?value=nightly-comfyui');
+            api.fetchApi('/manager/policy/update?value=nightly-comfyui');
         }
         else {
             update_policy_combo.value = 'stable-comfyui';
-            api.fetchApi('/v2/manager/policy/update?value=stable-comfyui');
+            api.fetchApi('/manager/policy/update?value=stable-comfyui');
         }

-        let response = await api.fetchApi(`/v2/comfyui_manager/comfyui_switch_version?ver=${selected_version}`, { cache: "no-store" });
+        let response = await api.fetchApi(`/comfyui_manager/comfyui_switch_version?ver=${selected_version}`, { cache: "no-store" });
         if (response.status == 200) {
             infoToast(`ComfyUI version is switched to ${selected_version}`);
         }

@@ -660,17 +656,18 @@ async function onQueueStatus(event) {
     const isElectron = 'electronAPI' in window;

     if(event.detail.status == 'in_progress') {
-        // set_inprogress_mode();
+        set_inprogress_mode();
         update_all_button.innerText = `in progress.. (${event.detail.done_count}/${event.detail.total_count})`;
|
||||||
}
|
}
|
||||||
else if(event.detail.status == 'all-done') {
|
else if(event.detail.status == 'done') {
|
||||||
reset_action_buttons();
|
reset_action_buttons();
|
||||||
}
|
|
||||||
else if(event.detail.status == 'batch-done') {
|
if(!is_updating) {
|
||||||
if(batch_id != event.detail.batch_id) {
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
is_updating = false;
|
||||||
|
|
||||||
let success_list = [];
|
let success_list = [];
|
||||||
let failed_list = [];
|
let failed_list = [];
|
||||||
let comfyui_state = null;
|
let comfyui_state = null;
|
||||||
@@ -770,28 +767,41 @@ api.addEventListener("cm-queue-status", onQueueStatus);
|
|||||||
async function updateAll(update_comfyui) {
|
async function updateAll(update_comfyui) {
|
||||||
update_all_button.innerText = "Updating...";
|
update_all_button.innerText = "Updating...";
|
||||||
|
|
||||||
// set_inprogress_mode();
|
set_inprogress_mode();
|
||||||
|
|
||||||
var mode = manager_instance.datasrc_combo.value;
|
var mode = manager_instance.datasrc_combo.value;
|
||||||
|
|
||||||
showTerminal();
|
showTerminal();
|
||||||
|
|
||||||
batch_id = generateUUID();
|
|
||||||
|
|
||||||
let batch = {};
|
|
||||||
if(update_comfyui) {
|
if(update_comfyui) {
|
||||||
update_all_button.innerText = "Updating ComfyUI...";
|
update_all_button.innerText = "Updating ComfyUI...";
|
||||||
batch['update_comfyui'] = true;
|
await api.fetchApi('/manager/queue/update_comfyui');
|
||||||
}
|
}
|
||||||
|
|
||||||
batch['update_all'] = mode;
|
const response = await api.fetchApi(`/manager/queue/update_all?mode=${mode}`);
|
||||||
|
|
||||||
const res = await api.fetchApi(`/v2/manager/queue/batch`, {
|
if (response.status == 401) {
|
||||||
method: 'POST',
|
customAlert('Another task is already in progress. Please stop the ongoing task first.');
|
||||||
body: JSON.stringify(batch)
|
}
|
||||||
});
|
else if(response.status == 200) {
|
||||||
|
is_updating = true;
|
||||||
|
await api.fetchApi('/manager/queue/start');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function newDOMTokenList(initialTokens) {
|
||||||
|
const tmp = document.createElement(`div`);
|
||||||
|
|
||||||
|
const classList = tmp.classList;
|
||||||
|
if (initialTokens) {
|
||||||
|
initialTokens.forEach(token => {
|
||||||
|
classList.add(token);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return classList;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check whether the node is a potential output node (img, gif or video output)
|
* Check whether the node is a potential output node (img, gif or video output)
|
||||||
*/
|
*/
|
||||||
@@ -804,7 +814,7 @@ function restartOrStop() {
rebootAPI();
}
else {
- api.fetchApi('/v2/manager/queue/reset');
+ api.fetchApi('/manager/queue/reset');
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
}
}

@@ -952,12 +962,12 @@ class ManagerMenuDialog extends ComfyDialog {
this.datasrc_combo.appendChild($el('option', { value: 'local', text: 'DB: Local' }, []));
this.datasrc_combo.appendChild($el('option', { value: 'remote', text: 'DB: Channel (remote)' }, []));

- api.fetchApi('/v2/manager/db_mode')
+ api.fetchApi('/manager/db_mode')
.then(response => response.text())
.then(data => { this.datasrc_combo.value = data; });

this.datasrc_combo.addEventListener('change', function (event) {
- api.fetchApi(`/v2/manager/db_mode?value=${event.target.value}`);
+ api.fetchApi(`/manager/db_mode?value=${event.target.value}`);
});

// preview method

@@ -969,19 +979,19 @@ class ManagerMenuDialog extends ComfyDialog {
preview_combo.appendChild($el('option', { value: 'latent2rgb', text: 'Preview method: Latent2RGB (fast)' }, []));
preview_combo.appendChild($el('option', { value: 'none', text: 'Preview method: None (very fast)' }, []));

- api.fetchApi('/v2/manager/preview_method')
+ api.fetchApi('/manager/preview_method')
.then(response => response.text())
.then(data => { preview_combo.value = data; });

preview_combo.addEventListener('change', function (event) {
- api.fetchApi(`/v2/manager/preview_method?value=${event.target.value}`);
+ api.fetchApi(`/manager/preview_method?value=${event.target.value}`);
});

// channel
let channel_combo = document.createElement("select");
channel_combo.setAttribute("title", "Configure the channel for retrieving data from the Custom Node list (including missing nodes) or the Model list.");
channel_combo.className = "cm-menu-combo";
- api.fetchApi('/v2/manager/channel_url_list')
+ api.fetchApi('/manager/channel_url_list')
.then(response => response.json())
.then(async data => {
try {

@@ -994,7 +1004,7 @@ class ManagerMenuDialog extends ComfyDialog {
}

channel_combo.addEventListener('change', function (event) {
- api.fetchApi(`/v2/manager/channel_url_list?value=${event.target.value}`);
+ api.fetchApi(`/manager/channel_url_list?value=${event.target.value}`);
});

channel_combo.value = data.selected;

@@ -1022,7 +1032,7 @@ class ManagerMenuDialog extends ComfyDialog {
share_combo.appendChild($el('option', { value: option[0], text: `Share: ${option[1]}` }, []));
}

- api.fetchApi('/v2/manager/share_option')
+ api.fetchApi('/manager/share_option')
.then(response => response.text())
.then(data => {
share_combo.value = data || 'all';

@@ -1032,7 +1042,7 @@ class ManagerMenuDialog extends ComfyDialog {
share_combo.addEventListener('change', function (event) {
const value = event.target.value;
share_option = value;
- api.fetchApi(`/v2/manager/share_option?value=${value}`);
+ api.fetchApi(`/manager/share_option?value=${value}`);
const shareButton = document.getElementById("shareButton");
if (value === 'none') {
shareButton.style.display = "none";

@@ -1047,7 +1057,7 @@ class ManagerMenuDialog extends ComfyDialog {
component_policy_combo.appendChild($el('option', { value: 'workflow', text: 'Component: Use workflow version' }, []));
component_policy_combo.appendChild($el('option', { value: 'higher', text: 'Component: Use higher version' }, []));
component_policy_combo.appendChild($el('option', { value: 'mine', text: 'Component: Use my version' }, []));
- api.fetchApi('/v2/manager/policy/component')
+ api.fetchApi('/manager/policy/component')
.then(response => response.text())
.then(data => {
component_policy_combo.value = data;

@@ -1055,7 +1065,7 @@ class ManagerMenuDialog extends ComfyDialog {
});

component_policy_combo.addEventListener('change', function (event) {
- api.fetchApi(`/v2/manager/policy/component?value=${event.target.value}`);
+ api.fetchApi(`/manager/policy/component?value=${event.target.value}`);
set_component_policy(event.target.value);
});


@@ -1068,14 +1078,14 @@ class ManagerMenuDialog extends ComfyDialog {
update_policy_combo.className = "cm-menu-combo";
update_policy_combo.appendChild($el('option', { value: 'stable-comfyui', text: 'Update: ComfyUI Stable Version' }, []));
update_policy_combo.appendChild($el('option', { value: 'nightly-comfyui', text: 'Update: ComfyUI Nightly Version' }, []));
- api.fetchApi('/v2/manager/policy/update')
+ api.fetchApi('/manager/policy/update')
.then(response => response.text())
.then(data => {
update_policy_combo.value = data;
});

update_policy_combo.addEventListener('change', function (event) {
- api.fetchApi(`/v2/manager/policy/update?value=${event.target.value}`);
+ api.fetchApi(`/manager/policy/update?value=${event.target.value}`);
});

return [

@@ -1378,12 +1388,12 @@ class ManagerMenuDialog extends ComfyDialog {
}

async function getVersion() {
- let version = await api.fetchApi(`/v2/manager/version`);
+ let version = await api.fetchApi(`/manager/version`);
return await version.text();
}

app.registerExtension({
- name: "Comfy.Legacy.ManagerMenu",
+ name: "Comfy.ManagerMenu",

aboutPageBadges: [
{

@@ -1514,6 +1524,8 @@ app.registerExtension({
tooltip: "Share"
}).element
);

+ app.menu?.settingsGroup.element.before(cmGroup.element);
}
catch(exception) {
console.log('ComfyUI is outdated. New style menu based features are disabled.');
@@ -172,7 +172,7 @@ export const shareToEsheep= () => {
const nodes = app.graph._nodes
const { potential_outputs, potential_output_nodes } = getPotentialOutputsAndOutputNodes(nodes);
const workflow = prompt['workflow']
- api.fetchApi(`/v2/manager/set_esheep_workflow_and_images`, {
+ api.fetchApi(`/manager/set_esheep_workflow_and_images`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({

@@ -552,20 +552,6 @@ export class ShareDialog extends ComfyDialog {
this.matrix_destination_checkbox.style.color = "var(--fg-color)";
this.matrix_destination_checkbox.checked = this.share_option === 'matrix'; //true;

- try {
- api.fetchApi(`/v2/manager/get_matrix_dep_status`)
- .then(response => response.text())
- .then(data => {
- if(data == 'unavailable') {
- matrix_destination_checkbox_text.style.textDecoration = "line-through";
- this.matrix_destination_checkbox.disabled = true;
- this.matrix_destination_checkbox.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
- matrix_destination_checkbox_text.title = "It has been disabled because the 'matrix-nio' dependency is not installed. Please install this dependency to use the matrix sharing feature.";
- }
- })
- .catch(error => {});
- } catch (error) {}

this.comfyworkflows_destination_checkbox = $el("input", { type: 'checkbox', id: "comfyworkflows_destination" }, [])
const comfyworkflows_destination_checkbox_text = $el("label", {}, [" ComfyWorkflows.com"])
this.comfyworkflows_destination_checkbox.style.color = "var(--fg-color)";

@@ -826,7 +812,7 @@ export class ShareDialog extends ComfyDialog {
// get the user's existing matrix auth and share key
ShareDialog.matrix_auth = { homeserver: "matrix.org", username: "", password: "" };
try {
- api.fetchApi(`/v2/manager/get_matrix_auth`)
+ api.fetchApi(`/manager/get_matrix_auth`)
.then(response => response.json())
.then(data => {
ShareDialog.matrix_auth = data;

@@ -845,7 +831,7 @@ export class ShareDialog extends ComfyDialog {
ShareDialog.cw_sharekey = "";
try {
// console.log("Fetching comfyworkflows share key")
- api.fetchApi(`/v2/manager/get_comfyworkflows_auth`)
+ api.fetchApi(`/manager/get_comfyworkflows_auth`)
.then(response => response.json())
.then(data => {
ShareDialog.cw_sharekey = data.comfyworkflows_sharekey;

@@ -905,7 +891,7 @@ export class ShareDialog extends ComfyDialog {
// Change the text of the share button to "Sharing..." to indicate that the share process has started
this.share_button.textContent = "Sharing...";

- const response = await api.fetchApi(`/v2/manager/share`, {
+ const response = await api.fetchApi(`/manager/share`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
@@ -67,7 +67,7 @@ export class OpenArtShareDialog extends ComfyDialog {
async readKey() {
let key = ""
try {
- key = await api.fetchApi(`/v2/manager/get_openart_auth`)
+ key = await api.fetchApi(`/manager/get_openart_auth`)
.then(response => response.json())
.then(data => {
return data.openart_key;

@@ -82,7 +82,7 @@ export class OpenArtShareDialog extends ComfyDialog {
}

async saveKey(value) {
- await api.fetchApi(`/v2/manager/set_openart_auth`, {
+ await api.fetchApi(`/manager/set_openart_auth`, {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({

@@ -399,7 +399,7 @@ export class OpenArtShareDialog extends ComfyDialog {
form.append("file", uploadFile);
try {
const res = await this.fetchApi(
- `/v2/workflows/upload_thumbnail`,
+ `/workflows/upload_thumbnail`,
{
method: "POST",
body: form,

@@ -459,7 +459,7 @@ export class OpenArtShareDialog extends ComfyDialog {
throw new Error("Title is required");
}

- const current_snapshot = await api.fetchApi(`/v2/snapshot/get_current`)
+ const current_snapshot = await api.fetchApi(`/snapshot/get_current`)
.then(response => response.json())
.catch(error => {
// console.log(error);

@@ -489,7 +489,7 @@ export class OpenArtShareDialog extends ComfyDialog {

try {
const response = await this.fetchApi(
- "/v2/workflows/publish",
+ "/workflows/publish",
{
method: "POST",
headers: {"Content-Type": "application/json"},
@@ -179,7 +179,7 @@ export class YouMLShareDialog extends ComfyDialog {
async loadToken() {
let key = ""
try {
- const response = await api.fetchApi(`/v2/manager/youml/settings`)
+ const response = await api.fetchApi(`/manager/youml/settings`)
const settings = await response.json()
return settings.token
} catch (error) {

@@ -188,7 +188,7 @@ export class YouMLShareDialog extends ComfyDialog {
}

async saveToken(value) {
- await api.fetchApi(`/v2/manager/youml/settings`, {
+ await api.fetchApi(`/manager/youml/settings`, {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({

@@ -380,7 +380,7 @@ export class YouMLShareDialog extends ComfyDialog {
try {
let snapshotData = null;
try {
- const snapshot = await api.fetchApi(`/v2/snapshot/get_current`)
+ const snapshot = await api.fetchApi(`/snapshot/get_current`)
snapshotData = await snapshot.json()
} catch (e) {
console.error("Failed to get snapshot", e)
@@ -172,7 +172,7 @@ export function rebootAPI() {
customConfirm("Are you sure you'd like to reboot the server?").then((isConfirmed) => {
if (isConfirmed) {
try {
- api.fetchApi("/v2/manager/reboot");
+ api.fetchApi("/manager/reboot");
}
catch(exception) {}
}

@@ -210,7 +210,7 @@ export async function install_pip(packages) {
if(packages.includes('&'))
app.ui.dialog.show(`Invalid PIP package enumeration: '${packages}'`);

- const res = await api.fetchApi("/v2/customnode/install/pip", {
+ const res = await api.fetchApi("/customnode/install/pip", {
method: "POST",
body: packages,
});

@@ -245,7 +245,7 @@ export async function install_via_git_url(url, manager_dialog) {

show_message(`Wait...<BR><BR>Installing '${url}'`);

- const res = await api.fetchApi("/v2/customnode/install/git_url", {
+ const res = await api.fetchApi("/customnode/install/git_url", {
method: "POST",
body: url,
});

@@ -630,14 +630,6 @@ export function showTooltip(target, text, className = 'cn-tooltip', styleMap = {
});
}

- export function generateUUID() {
- return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
- const r = Math.random() * 16 | 0;
- const v = c === 'x' ? r : (r & 0x3 | 0x8);
- return v.toString(16);
- });
- }

function initTooltip () {
const mouseenterHandler = (e) => {
const target = e.target;
@@ -64,7 +64,7 @@ function storeGroupNode(name, data, register=true) {
}

export async function load_components() {
- let data = await api.fetchApi('/v2/manager/component/loads', {method: "POST"});
+ let data = await api.fetchApi('/manager/component/loads', {method: "POST"});
let components = await data.json();

let start_time = Date.now();

@@ -222,7 +222,7 @@ async function save_as_component(node, version, author, prefix, nodename, packna
pack_map[packname] = component_name;
rpack_map[component_name] = subgraph;

- const res = await api.fetchApi('/v2/manager/component/save', {
+ const res = await api.fetchApi('/manager/component/save', {
method: "POST",
headers: {
"Content-Type": "application/json",

@@ -259,7 +259,7 @@ async function import_component(component_name, component, mode) {
workflow: component
};

- const res = await api.fetchApi('/v2/manager/component/save', {
+ const res = await api.fetchApi('/manager/component/save', {
method: "POST",
headers: { "Content-Type": "application/json", },
body: JSON.stringify(body)

@@ -709,7 +709,7 @@ app.handleFile = handleFile;

let current_component_policy = 'workflow';
try {
- api.fetchApi('/v2/manager/policy/component')
+ api.fetchApi('/manager/policy/component')
.then(response => response.text())
.then(data => { current_component_policy = data; });
}
@@ -7,7 +7,7 @@ import {
fetchData, md5, icons, show_message, customConfirm, customAlert, customPrompt,
sanitizeHTML, infoToast, showTerminal, setNeedRestart,
storeColumnWidth, restoreColumnWidth, getTimeAgo, copyText, loadCss,
- showPopover, hidePopover, generateUUID
+ showPopover, hidePopover
} from "./common.js";

// https://cenfun.github.io/turbogrid/api.html

@@ -66,7 +66,7 @@ export class CustomNodesManager {
this.id = "cn-manager";

app.registerExtension({
- name: "Comfy.Legacy.CustomNodesManager",
+ name: "Comfy.CustomNodesManager",
afterConfigureGraph: (missingNodeTypes) => {
const item = this.getFilterItem(ShowMode.MISSING);
if (item) {

@@ -459,7 +459,7 @@ export class CustomNodesManager {

".cn-manager-stop": {
click: () => {
- api.fetchApi('/v2/manager/queue/reset');
+ api.fetchApi('/manager/queue/reset');
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
}
},

@@ -635,7 +635,7 @@ export class CustomNodesManager {
};
}

- const response = await api.fetchApi(`/v2/customnode/import_fail_info`, {
+ const response = await api.fetchApi(`/customnode/import_fail_info`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(info)

@@ -1244,7 +1244,7 @@ export class CustomNodesManager {
async loadNodes(node_packs) {
const mode = manager_instance.datasrc_combo.value;
this.showStatus(`Loading node mappings (${mode}) ...`);
- const res = await fetchData(`/v2/customnode/getmappings?mode=${mode}`);
+ const res = await fetchData(`/customnode/getmappings?mode=${mode}`);
if (res.error) {
console.log(res.error);
return;

@@ -1396,10 +1396,10 @@ export class CustomNodesManager {
this.showLoading();
let res;
if(is_enable) {
- res = await api.fetchApi(`/v2/customnode/disabled_versions/${node_id}`, { cache: "no-store" });
+ res = await api.fetchApi(`/customnode/disabled_versions/${node_id}`, { cache: "no-store" });
}
else {
- res = await api.fetchApi(`/v2/customnode/versions/${node_id}`, { cache: "no-store" });
+ res = await api.fetchApi(`/customnode/versions/${node_id}`, { cache: "no-store" });
}
this.hideLoading();


@@ -1441,6 +1441,13 @@ export class CustomNodesManager {
}

async installNodes(list, btn, title, selected_version) {
+ let stats = await api.fetchApi('/manager/queue/status');
+ stats = await stats.json();
+ if(stats.is_processing) {
+ customAlert(`[ComfyUI-Manager] There are already tasks in progress. Please try again after it is completed. (${stats.done_count}/${stats.total_count})`);
+ return;
+ }

const { target, label, mode} = btn;

if(mode === "uninstall") {

@@ -1467,9 +1474,9 @@ export class CustomNodesManager {
let needRestart = false;
let errorMsg = "";

- let target_items = [];
+ await api.fetchApi('/manager/queue/reset');

- let batch = {};
+ let target_items = [];

for (const hash of list) {
const item = this.grid.getRowItemBy("hash", hash);

@@ -1512,11 +1519,23 @@ export class CustomNodesManager {
api_mode = 'reinstall';
}

- if(batch[api_mode]) {
- batch[api_mode].push(data);
- }
- else {
- batch[api_mode] = [data];
+ const res = await api.fetchApi(`/manager/queue/${api_mode}`, {
+ method: 'POST',
+ body: JSON.stringify(data)
+ });

+ if (res.status != 200) {
+ errorMsg = `'${item.title}': `;

+ if(res.status == 403) {
+ errorMsg += `This action is not allowed with this security level configuration.\n`;
+ } else if(res.status == 404) {
+ errorMsg += `With the current security level configuration, only custom nodes from the <B>"default channel"</B> can be installed.\n`;
+ } else {
+ errorMsg += await res.text() + '\n';
+ }

+ break;
}
}


@@ -1533,24 +1552,7 @@ export class CustomNodesManager {
}
}
else {
- this.batch_id = generateUUID();
- batch['batch_id'] = this.batch_id;

- const res = await api.fetchApi(`/v2/manager/queue/batch`, {
- method: 'POST',
- body: JSON.stringify(batch)
- });

- let failed = await res.json();

- if(failed.length > 0) {
- for(let k in failed) {
- let hash = failed[k];
- const item = this.grid.getRowItemBy("hash", hash);
- errorMsg = `[FAIL] ${item.title}`;
- }
- }
+ await api.fetchApi('/manager/queue/start');

this.showStop();
showTerminal();
}

@@ -1558,9 +1560,6 @@ export class CustomNodesManager {

async onQueueStatus(event) {
let self = CustomNodesManager.instance;
- // If legacy manager front is not open, return early (using new manager front)
- if (self.element?.style.display === 'none') return

if(event.detail.status == 'in_progress' && event.detail.ui_target == 'nodepack_manager') {
const hash = event.detail.target;


@@ -1571,7 +1570,7 @@ export class CustomNodesManager {
self.grid.updateCell(item, "action");
self.grid.setRowSelected(item, false);
}
- else if(event.detail.status == 'batch-done' && event.detail.batch_id == self.batch_id) {
+ else if(event.detail.status == 'done') {
self.hideStop();
self.onQueueCompleted(event.detail);
}

@@ -1765,7 +1764,7 @@ export class CustomNodesManager {
async getMissingNodesLegacy(hashMap, missing_nodes) {
const mode = manager_instance.datasrc_combo.value;
this.showStatus(`Loading missing nodes (${mode}) ...`);
- const res = await fetchData(`/v2/customnode/getmappings?mode=${mode}`);
+ const res = await fetchData(`/customnode/getmappings?mode=${mode}`);
if (res.error) {
this.showError(`Failed to get custom node mappings: ${res.error}`);
return;

@@ -1880,7 +1879,7 @@ export class CustomNodesManager {
async getAlternatives() {
const mode = manager_instance.datasrc_combo.value;
this.showStatus(`Loading alternatives (${mode}) ...`);
- const res = await fetchData(`/v2/customnode/alternatives?mode=${mode}`);
+ const res = await fetchData(`/customnode/alternatives?mode=${mode}`);
if (res.error) {
this.showError(`Failed to get alternatives: ${res.error}`);
return [];

@@ -1928,7 +1927,7 @@ export class CustomNodesManager {
infoToast('Fetching updated information. This may take some time if many custom nodes are installed.');
}

- const res = await fetchData(`/v2/customnode/getlist?mode=${mode}${skip_update}`);
+ const res = await fetchData(`/customnode/getlist?mode=${mode}${skip_update}`);
if (res.error) {
this.showError("Failed to get custom node list.");
this.hideLoading();
@@ -3,7 +3,7 @@ import { $el } from "../../scripts/ui.js";
import {
manager_instance, rebootAPI,
fetchData, md5, icons, show_message, customAlert, infoToast, showTerminal,
- storeColumnWidth, restoreColumnWidth, loadCss, generateUUID
+ storeColumnWidth, restoreColumnWidth, loadCss
} from "./common.js";
import { api } from "../../scripts/api.js";


@@ -175,7 +175,7 @@ export class ModelManager {

".cmm-manager-stop": {
click: () => {
- api.fetchApi('/v2/manager/queue/reset');
+ api.fetchApi('/manager/queue/reset');
infoToast('Cancel', 'Remaining tasks will stop after completing the current task.');
}
},

@@ -435,15 +435,23 @@ export class ModelManager {
}

async installModels(list, btn) {
+ let stats = await api.fetchApi('/manager/queue/status');

+ stats = await stats.json();
+ if(stats.is_processing) {
+ customAlert(`[ComfyUI-Manager] There are already tasks in progress. Please try again after it is completed. (${stats.done_count}/${stats.total_count})`);
+ return;
+ }

btn.classList.add("cmm-btn-loading");
this.showError("");

let needRefresh = false;
let errorMsg = "";

- let target_items = [];
+ await api.fetchApi('/manager/queue/reset');

- let batch = {};
+ let target_items = [];

for (const item of list) {
this.grid.scrollRowIntoView(item);

@@ -460,12 +468,21 @@ export class ModelManager {
const data = item.originalData;
data.ui_id = item.hash;

+ const res = await api.fetchApi(`/manager/queue/install_model`, {
+ method: 'POST',
+ body: JSON.stringify(data)
+ });

- if(batch['install_model']) {
- batch['install_model'].push(data);
- }
- else {
- batch['install_model'] = [data];
+ if (res.status != 200) {
+ errorMsg = `'${item.name}': `;

+ if(res.status == 403) {
+ errorMsg += `This action is not allowed with this security level configuration.\n`;
+ } else {
+ errorMsg += await res.text() + '\n';
+ }

+ break;
}
}


@@ -482,24 +499,7 @@ export class ModelManager {
}
}
else {
- this.batch_id = generateUUID();
- batch['batch_id'] = this.batch_id;

- const res = await api.fetchApi(`/v2/manager/queue/batch`, {
- method: 'POST',
- body: JSON.stringify(batch)
- });

- let failed = await res.json();

- if(failed.length > 0) {
- for(let k in failed) {
- let hash = failed[k];
- const item = self.grid.getRowItemBy("hash", hash);
- errorMsg = `[FAIL] ${item.title}`;
- }
- }
+ await api.fetchApi('/manager/queue/start');

this.showStop();
showTerminal();
}

@@ -519,7 +519,7 @@ export class ModelManager {
// self.grid.updateCell(item, "tg-column-select");
self.grid.updateRow(item);
}
- else if(event.detail.status == 'batch-done') {
+ else if(event.detail.status == 'done') {
self.hideStop();
self.onQueueCompleted(event.detail);
}

@@ -645,7 +645,7 @@ export class ModelManager {

const mode = manager_instance.datasrc_combo.value;

- const res = await fetchData(`/v2/externalmodel/getlist?mode=${mode}`);
+ const res = await fetchData(`/externalmodel/getlist?mode=${mode}`);
if (res.error) {
this.showError("Failed to get external model list.");
this.hideLoading();
@@ -142,7 +142,7 @@ function node_info_copy(src, dest, connect_both, copy_shape) {
}

app.registerExtension({
- name: "Comfy.Legacy.Manager.NodeFixer",
+ name: "Comfy.Manager.NodeFixer",
beforeRegisterNodeDef(nodeType, nodeData, app) {
addMenuHandler(nodeType, function (_, options) {
options.push({
@@ -7,7 +7,7 @@ import { manager_instance, rebootAPI, show_message } from "./common.js";
async function restore_snapshot(target) {
if(SnapshotManager.instance) {
try {
- const response = await api.fetchApi(`/v2/snapshot/restore?target=${target}`, { cache: "no-store" });
+ const response = await api.fetchApi(`/snapshot/restore?target=${target}`, { cache: "no-store" });

if(response.status == 403) {
show_message('This action is not allowed with this security level configuration.');

@@ -35,7 +35,7 @@ async function restore_snapshot(target) {
async function remove_snapshot(target) {
if(SnapshotManager.instance) {
try {
- const response = await api.fetchApi(`/v2/snapshot/remove?target=${target}`, { cache: "no-store" });
+ const response = await api.fetchApi(`/snapshot/remove?target=${target}`, { cache: "no-store" });

if(response.status == 403) {
show_message('This action is not allowed with this security level configuration.');

@@ -61,7 +61,7 @@ async function remove_snapshot(target) {

async function save_current_snapshot() {
try {
- const response = await api.fetchApi('/v2/snapshot/save', { cache: "no-store" });
+ const response = await api.fetchApi('/snapshot/save', { cache: "no-store" });
app.ui.dialog.close();
return true;
}

@@ -76,7 +76,7 @@ async function save_current_snapshot() {
}

async function getSnapshotList() {
- const response = await api.fetchApi(`/v2/snapshot/getlist`);
+ const response = await api.fetchApi(`/snapshot/getlist`);
const data = await response.json();
return data;
}
@@ -38,7 +38,7 @@ class WorkflowMetadataExtension {
* enabled is true if the node is enabled, false if it is disabled
*/
async getInstalledNodes() {
- const res = await api.fetchApi("/v2/customnode/installed");
+ const res = await api.fetchApi("/customnode/installed");
return await res.json();
}

137
monitor_test.sh
@@ -1,137 +0,0 @@
#!/bin/bash
# ============================================================================
# Test Monitoring Script
# ============================================================================
# Monitors background test execution and reports status/failures
# Usage: ./monitor_test.sh <log_file> <timeout_seconds>
# ============================================================================

set -e

LOG_FILE="${1:-/tmp/test-param-fix.log}"
TIMEOUT="${2:-600}"        # Default 10 minutes
CHECK_INTERVAL=10          # Check every 10 seconds
STALL_THRESHOLD=60         # Consider stalled if no new output for 60 seconds

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}Test Monitor Started${NC}"
echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}Log File: ${LOG_FILE}${NC}"
echo -e "${BLUE}Timeout: ${TIMEOUT}s${NC}"
echo -e "${BLUE}Stall Threshold: ${STALL_THRESHOLD}s${NC}"
echo ""

START_TIME=$(date +%s)
LAST_SIZE=0
LAST_CHANGE_TIME=$START_TIME
STATUS="running"

while true; do
    CURRENT_TIME=$(date +%s)
    ELAPSED=$((CURRENT_TIME - START_TIME))

    # Check if log file exists
    if [ ! -f "$LOG_FILE" ]; then
        echo -e "${YELLOW}[$(date '+%H:%M:%S')] Waiting for log file...${NC}"
        sleep $CHECK_INTERVAL
        continue
    fi

    # Check file size
    CURRENT_SIZE=$(wc -c < "$LOG_FILE" 2>/dev/null || echo "0")
    TIME_SINCE_CHANGE=$((CURRENT_TIME - LAST_CHANGE_TIME))

    # Check if file size changed (progress)
    if [ "$CURRENT_SIZE" -gt "$LAST_SIZE" ]; then
        LAST_SIZE=$CURRENT_SIZE
        LAST_CHANGE_TIME=$CURRENT_TIME

        # Show latest lines
        echo -e "${GREEN}[$(date '+%H:%M:%S')] Progress detected (${CURRENT_SIZE} bytes, +${ELAPSED}s)${NC}"
        tail -3 "$LOG_FILE" | sed 's/\x1b\[[0-9;]*m//g' # Remove color codes
        echo ""
    else
        # No progress
        echo -e "${YELLOW}[$(date '+%H:%M:%S')] No change (stalled ${TIME_SINCE_CHANGE}s)${NC}"
    fi

    # Check for completion markers
    if grep -q "✅ ComfyUI_.*: PASSED" "$LOG_FILE" 2>/dev/null || \
       grep -q "❌ ComfyUI_.*: FAILED" "$LOG_FILE" 2>/dev/null || \
       grep -q "Test Suite Complete" "$LOG_FILE" 2>/dev/null; then

        echo -e "${GREEN}========================================${NC}"
        echo -e "${GREEN}Tests Completed!${NC}"
        echo -e "${GREEN}========================================${NC}"

        # Show summary
        grep -E "passed|failed|PASSED|FAILED" "$LOG_FILE" | tail -20

        # Check if tests passed
        if grep -q "❌.*FAILED" "$LOG_FILE" 2>/dev/null; then
            echo -e "${RED}❌ Some tests FAILED${NC}"
            STATUS="failed"
        else
            echo -e "${GREEN}✅ All tests PASSED${NC}"
            STATUS="success"
        fi

        break
    fi

    # Check for errors
    if grep -qi "error\|exception\|traceback" "$LOG_FILE" 2>/dev/null; then
        LAST_ERROR=$(grep -i "error\|exception" "$LOG_FILE" | tail -1)
        echo -e "${RED}[$(date '+%H:%M:%S')] Error detected: ${LAST_ERROR}${NC}"
    fi

    # Check for stall (no progress for STALL_THRESHOLD seconds)
    if [ "$TIME_SINCE_CHANGE" -gt "$STALL_THRESHOLD" ]; then
        echo -e "${RED}========================================${NC}"
        echo -e "${RED}⚠️ Test Execution STALLED${NC}"
        echo -e "${RED}========================================${NC}"
        echo -e "${RED}No progress for ${TIME_SINCE_CHANGE} seconds${NC}"
        echo -e "${RED}Last output:${NC}"
        tail -10 "$LOG_FILE" | sed 's/\x1b\[[0-9;]*m//g'

        STATUS="stalled"
        break
    fi

    # Check for timeout
    if [ "$ELAPSED" -gt "$TIMEOUT" ]; then
        echo -e "${RED}========================================${NC}"
        echo -e "${RED}⏰ Test Execution TIMEOUT${NC}"
        echo -e "${RED}========================================${NC}"
        echo -e "${RED}Exceeded ${TIMEOUT}s timeout${NC}"

        STATUS="timeout"
        break
    fi

    # Wait before next check
    sleep $CHECK_INTERVAL
done

# Final status
echo ""
echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}Final Status: ${STATUS}${NC}"
echo -e "${BLUE}Total Time: ${ELAPSED}s${NC}"
echo -e "${BLUE}========================================${NC}"

# Exit with appropriate code
case "$STATUS" in
    "success") exit 0 ;;
    "failed") exit 1 ;;
    "stalled") exit 2 ;;
    "timeout") exit 3 ;;
    *) exit 99 ;;
esac
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,5 +1,15 @@
{
    "custom_nodes": [
+        {
+            "author": "synchronicity-labs",
+            "title": "ComfyUI Sync Lipsync Node",
+            "reference": "https://github.com/synchronicity-labs/sync-comfyui",
+            "files": [
+                "https://github.com/synchronicity-labs/sync-comfyui"
+            ],
+            "install_type": "git-clone",
+            "description": "This custom node allows you to perform audio-video lip synchronization inside ComfyUI using a simple interface."
+        },
        {
            "author": "joaomede",
            "title": "ComfyUI-Unload-Model-Fork",
@@ -1,5 +1,460 @@
{
    "custom_nodes": [
+        {
+            "author": "AlfredClark",
+            "title": "ComfyUI-ModelSpec [REMOVED]",
+            "reference": "https://github.com/AlfredClark/ComfyUI-ModelSpec",
+            "files": [
+                "https://github.com/AlfredClark/ComfyUI-ModelSpec"
+            ],
+            "install_type": "git-clone",
+            "description": "ComfyUI model metadata editing nodes."
+        },
+        {
+            "author": "VraethrDalkr",
+            "title": "ComfyUI-ProgressiveBlend [REMOVED]",
+            "reference": "https://github.com/VraethrDalkr/ComfyUI-ProgressiveBlend",
+            "files": [
+                "https://github.com/VraethrDalkr/ComfyUI-ProgressiveBlend"
+            ],
+            "install_type": "git-clone",
+            "description": "A collection of custom nodes for ComfyUI that enable progressive blending and color matching effects across image batches/video frames."
+        },
+        {
+            "author": "xmarked-ai",
+            "title": "ComfyUI_misc [REMOVED]",
+            "reference": "https://github.com/xmarked-ai/ComfyUI_misc",
+            "files": [
+                "https://github.com/xmarked-ai/ComfyUI_misc"
+            ],
+            "install_type": "git-clone",
+            "description": "NODES: Ace IntegerX, Ace FloatX, Ace Color FixX, White Balance X, Depth Displace X, Empty Latent X, KSampler Combo X, ..."
+        },
+        {
+            "author": "sm079",
+            "title": "ComfyUI-Face-Detection [REMOVED]",
+            "reference": "https://github.com/sm079/ComfyUI-Face-Detection",
+            "files": [
+                "https://github.com/sm079/ComfyUI-Face-Detection"
+            ],
+            "install_type": "git-clone",
+            "description": "face detection nodes for comfyui"
+        },
+        {
+            "author": "42lux",
+            "title": "ComfyUI-42lux [REMOVED]",
+            "reference": "https://github.com/42lux/ComfyUI-42lux",
+            "files": [
+                "https://github.com/42lux/ComfyUI-42lux"
+            ],
+            "install_type": "git-clone",
+            "description": "A collection of custom nodes for ComfyUI focused on enhanced sampling, model optimization, and quality improvements."
+        },
+        {
+            "author": "lucak5s",
+            "title": "ComfyUI GFPGAN [REMOVED]",
+            "reference": "https://github.com/lucak5s/comfyui_gfpgan",
+            "files": [
+                "https://github.com/lucak5s/comfyui_gfpgan"
+            ],
+            "install_type": "git-clone",
+            "description": "Face restoration with GFPGAN."
+        },
+        {
+            "author": "impactframes",
+            "title": "IF_AI_tools [DEPRECATED]",
+            "id": "impactframes-tools",
+            "reference": "https://github.com/if-ai/ComfyUI-IF_AI_tools",
+            "files": [
+                "https://github.com/if-ai/ComfyUI-IF_AI_tools"
+            ],
+            "install_type": "git-clone",
+            "description": "Various AI tools to use in Comfy UI. Starting with VL and prompt making tools using Ollma as backend will evolve as I find time."
+        },
+        {
+            "author": "netroxin",
+            "title": "comfyui_netro [REMOVED]",
+            "reference": "https://github.com/netroxin/comfyui_netro",
+            "files": [
+                "https://github.com/netroxin/comfyui_netro"
+            ],
+            "install_type": "git-clone",
+            "description": "#Camera Movement Prompt Node for ComfyUI\nThis custom node script for ComfyUI generates descriptive camera movement prompts based on user-selected movement options for Wan2.2"
+        },
+        {
+            "author": "aistudynow",
+            "title": "comfyui-HunyuanImage-2.1 [REMOVED]",
+            "reference": "https://github.com/aistudynow/comfyui-HunyuanImage-2.1",
+            "files": [
+                "https://github.com/aistudynow/comfyui-HunyuanImage-2.1"
+            ],
+            "install_type": "git-clone",
+            "description": "NODES: Load HunyuanImage DiT, Load HunyuanImage VAE, Load HunyuanImage Dual Text Encoder, HunyuanImage Sampler, HunyuanImage VAE Decode, HunyuanImage CLIP Text Encode, Empty HunyuanImage Latent Image"
+        },
+        {
+            "author": "SlackinJack",
+            "title": "distrifuser_comfyui [DEPRECATED]",
+            "reference": "https://github.com/SlackinJack/distrifuser_comfyui",
+            "files": [
+                "https://github.com/SlackinJack/distrifuser_comfyui"
+            ],
+            "install_type": "git-clone",
+            "description": "[a/Distrifuser](https://github.com/mit-han-lab/distrifuser) sampler node for ComfyUI\n"
+        },
+        {
+            "author": "SlackinJack",
+            "title": "asyncdiff_comfyui [DEPRECATED]",
+            "reference": "https://github.com/SlackinJack/asyncdiff_comfyui",
+            "files": [
+                "https://github.com/SlackinJack/asyncdiff_comfyui"
+            ],
+            "install_type": "git-clone",
+            "description": "AsyncDiff node for ComfyUI"
+        },
+        {
+            "author": "TheBill2001",
+            "title": "Save Images with Captions [REMOVED]",
+            "reference": "https://github.com/TheBill2001/ComfyUI-Save-Image-Caption",
+            "files": [
+                "https://github.com/TheBill2001/ComfyUI-Save-Image-Caption"
+            ],
+            "install_type": "git-clone",
+            "description": "Provide two custom nodes to load and save images with captions as separate files."
+        },
+        {
+            "author": "ShmuelRonen",
+            "title": "ComfyUI Flux 1.1 Ultra & Raw Node [REMOVED]",
+            "reference": "https://github.com/ShmuelRonen/ComfyUI_Flux_1.1_RAW_API",
+            "files": [
+                "https://github.com/ShmuelRonen/ComfyUI_Flux_1.1_RAW_API"
+            ],
+            "install_type": "git-clone",
+            "description": "A ComfyUI custom node for Black Forest Labs' FLUX 1.1 [pro] API, supporting both regular and Ultra modes with optional Raw mode."
+        },
+        {
+            "author": "mattwilliamson",
+            "title": "ComfyUI AI GameDev Nodes [UNSAFE/REMOVED]",
+            "reference": "https://github.com/mattwilliamson/comfyui-ai-gamedev",
+            "files": [
+                "https://github.com/mattwilliamson/comfyui-ai-gamedev"
+            ],
+            "install_type": "git-clone",
+            "description": "Custom ComfyUI nodes for AI-powered game asset generation, providing a comprehensive toolkit for game developers to create 3D models, animations, and audio assets using state-of-the-art AI models.[w/This node pack has an implementation that dynamically generates scripts.]"
+        },
+        {
+            "author": "manifestations",
+            "title": "ComfyUI Outfit Nodes [DEPRECATED]",
+            "reference": "https://github.com/manifestations/comfyui-outfit",
+            "files": [
+                "https://github.com/manifestations/comfyui-outfit"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Advanced, professional outfit and makeup generation nodes for ComfyUI, with dynamic UI and AI-powered prompt formatting."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Poukpalaova",
|
||||||
|
"title": "ComfyUI-FRED-Nodes [DEPRECATED]",
|
||||||
|
"reference": "https://github.com/Poukpalaova/ComfyUI-FRED-Nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Poukpalaova/ComfyUI-FRED-Nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Multiple nodes that ease the process.\nNOTE: The files in the repo are not organized."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "cwebbi1",
|
||||||
|
"title": "VoidCustomNodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/cwebbi1/VoidCustomNodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/cwebbi1/VoidCustomNodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES:Prompt Parser, String Combiner"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Shellishack",
|
||||||
|
"title": "ComfyUI Remote Media Loaders [REMOVED]",
|
||||||
|
"reference": "https://github.com/Shellishack/comfyui-remote-media-loaders",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Shellishack/comfyui-remote-media-loaders"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Load media (image/video/audio) from remote URL"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "D3lUX3I",
|
||||||
|
"title": "VideoPromptEnhancer [REMOVED]",
|
||||||
|
"reference": "https://github.com/D3lUX3I/ComfyUI-VideoPromptEnhancer",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/D3lUX3I/ComfyUI-VideoPromptEnhancer"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This node generates a professional prompt from an input text for modern video AI models (e.g., Alibaba Wan 2.2) via the OpenRouter API."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "perilli",
|
||||||
|
"title": "apw_nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/alessandroperilli/APW_Nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/alessandroperilli/APW_Nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A custom node suite to augment the capabilities of the [a/AP Workflows for ComfyUI](https://perilli.com/ai/comfyui/)\nNOTE: See [a/Open Creative Studio Nodes](https://github.com/alessandroperilli/OCS_Nodes)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "greengerong",
|
||||||
|
"title": "ComfyUI-Lumina-Video [REMOVED]",
|
||||||
|
"reference": "https://github.com/greengerong/ComfyUI-Lumina-Video",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/greengerong/ComfyUI-Lumina-Video"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is a video generation plugin implementation for ComfyUI based on the Lumina Video model."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "SatadalAI",
|
||||||
|
"title": "Combined Upscale Node for ComfyUI [REMOVED]",
|
||||||
|
"reference": "https://github.com/SatadalAI/SATA_UtilityNode",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/SatadalAI/SATA_UtilityNode"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Combined_Upscale is a custom ComfyUI node designed for high-quality image enhancement workflows. It intelligently combines model-based upscaling with efficient CPU-based resizing, offering granular control over output dimensions and quality. Ideal for asset pipelines, UI prototyping, and generative workflows.\nNOTE: The files in the repo are not organized."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "netroxin",
|
||||||
|
"title": "Netro_wildcards [REMOVED]",
|
||||||
|
"reference": "https://github.com/netroxin/comfyui_netro_wildcards",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/netroxin/comfyui_netro_wildcards"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Since I used 'simple wildcards' from Vanilla and it no longer works with the new Comfy UI version for me, I created an alternative. This CustomNode takes the entire contents of your wildcards-folder(comfyui wildcards) and creates a node for each one."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "takoyaki1118",
|
||||||
|
"title": "ComfyUI-MangaTools [REMOVED]",
|
||||||
|
"reference": "https://github.com/takoyaki1118/ComfyUI-MangaTools",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/takoyaki1118/ComfyUI-MangaTools"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "NODES: Manga Panel Detector, Manga Panel Dispatcher, GateImage, MangaPageAssembler"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "lucasgattas",
|
||||||
|
"title": "comfyui-egregora-regional [REMOVED]",
|
||||||
|
"reference": "https://github.com/lucasgattas/comfyui-egregora-regional",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/lucasgattas/comfyui-egregora-regional"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Image Tile Split with Region-Aware Prompting for ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "lucasgattas",
|
||||||
|
"title": "comfyui-egregora-tiled [REMOVED]",
|
||||||
|
"reference": "https://github.com/lucasgattas/comfyui-egregora-tiled",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/lucasgattas/comfyui-egregora-tiled"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Tiled regional prompting + tiled VAE decode with seam-free blending for ComfyUI"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Seedsa",
|
||||||
|
"title": "ComfyUI Fooocus Nodes [REMOVED]",
|
||||||
|
"id": "fooocus-nodes",
|
||||||
|
"reference": "https://github.com/Seedsa/Fooocus_Nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Seedsa/Fooocus_Nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This extension provides image generation features based on Fooocus."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "zhilemann",
|
||||||
|
"title": "ComfyUI-moondream2 [REMOVED]",
|
||||||
|
"reference": "https://github.com/zhilemann/ComfyUI-moondream2",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/zhilemann/ComfyUI-moondream2"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "nodes for nightly moondream2 VLM inference\nsupports only captioning and visual queries at the moment"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "shinich39",
|
||||||
|
"title": "comfyui-textarea-is-shit [REMOVED]",
|
||||||
|
"reference": "https://github.com/shinich39/comfyui-textarea-is-shit",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/shinich39/comfyui-textarea-is-shit"
|
||||||
|
],
|
||||||
|
"description": "HTML gives me a textarea like piece of shit.",
|
||||||
|
"install_type": "git-clone"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "shinich39",
|
||||||
|
"title": "comfyui-poor-textarea [REMOVED]",
|
||||||
|
"reference": "https://github.com/shinich39/comfyui-poor-textarea",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/shinich39/comfyui-poor-textarea"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Add commentify, indentation, auto-close brackets in textarea."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "InfiniNode",
|
||||||
|
"title": "Comfyui-InfiniNode-Model-Suite [UNSAFE/REMOVED]",
|
||||||
|
"reference": "https://github.com/InfiniNode/Comfyui-InfiniNode-Model-Suite",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/InfiniNode/Comfyui-InfiniNode-Model-Suite"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Welcome to the InfiniNode Model Suite, a custom node pack for ComfyUI that transforms the process of manipulating generative AI models. Our suite is a direct implementation of the 'GUI-Based Key Converter Development Plan,' designed to remove technical barriers for advanced AI practitioners and integrate seamlessly with existing image generation pipelines.[w/This node pack contains a node that has a vulnerability allowing write to arbitrary file paths.]"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Avalre",
|
||||||
|
"title": "ComfyUI-avaNodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/Avalre/ComfyUI-avaNodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Avalre/ComfyUI-avaNodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "These nodes were created to personalize/optimize several ComfyUI nodes for my own use. You can replicate the functionality of most of my nodes by some combination of default ComfyUI nodes and custom nodes from other developers."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Alectriciti",
|
||||||
|
"title": "comfyui-creativeprompts [REMOVED]",
|
||||||
|
"reference": "https://github.com/Alectriciti/comfyui-creativeprompts",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Alectriciti/comfyui-creativeprompts"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A creative alternative to dynamicprompts"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "flybirdxx",
|
||||||
|
"title": "ComfyUI Sliding Window [REMOVED]",
|
||||||
|
"reference": "https://github.com/PixWizardry/ComfyUI_Sliding_Window",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/PixWizardry/ComfyUI_Sliding_Window"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This set of nodes provides a powerful sliding window or 'tiling' technique for processing long videos and animations in ComfyUI. It allows you to work on animations that are longer than your VRAM would typically allow by breaking the job into smaller, overlapping chunks and seamlessly blending them back together."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "SykkoAtHome",
|
||||||
|
"title": "Sykko Tools for ComfyUI [REMOVED]",
|
||||||
|
"reference": "https://github.com/SykkoAtHome/ComfyUI_SykkoTools",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/SykkoAtHome/ComfyUI_SykkoTools"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Utilities for working with camera animations inside ComfyUI. The repository currently provides a node for loading camera motion from ASCII FBX files and a corresponding command line helper for debugging."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "hananbeer",
|
||||||
|
"title": "node_dev - ComfyUI Node Development Helper [REMOVED]",
|
||||||
|
"reference": "https://github.com/hananbeer/node_dev",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/hananbeer/node_dev"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Browse to this endpoint to reload custom nodes for more streamlined development:\nhttp://127.0.0.1:8188/node_dev/reload/<module_name>"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "Charonartist",
|
||||||
|
"title": "Comfyui_gemini_tts_node [REMOVED]",
|
||||||
|
"reference": "https://github.com/Charonartist/Comfyui_gemini_tts_node",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/Charonartist/Comfyui_gemini_tts_node"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This custom node is a ComfyUI node for generating speech from text using the Gemini 2.5 Flash Preview TTS API."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "squirrel765",
|
||||||
|
"title": "lorasubdirectory [REMOVED]",
|
||||||
|
"reference": "https://github.com/andrewsthomasj/lorasubdirectory",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/andrewsthomasj/lorasubdirectory"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "only show dropdown of loras ina a given subdirectory"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "shingo1228",
|
||||||
|
"title": "ComfyUI-send-Eagle(slim) [REVMOED]",
|
||||||
|
"id": "send-eagle",
|
||||||
|
"reference": "https://github.com/shingo1228/ComfyUI-send-eagle-slim",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/shingo1228/ComfyUI-send-eagle-slim"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Nodes:Send Webp Image to Eagle. This is an extension node for ComfyUI that allows you to send generated images in webp format to Eagle. This extension node is a re-implementation of the Eagle linkage functions of the previous ComfyUI-send-Eagle node, focusing on the functions required for this node."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "shingo1228",
|
||||||
|
"title": "ComfyUI-SDXL-EmptyLatentImage [REVMOED]",
|
||||||
|
"id": "sdxl-emptylatent",
|
||||||
|
"reference": "https://github.com/shingo1228/ComfyUI-SDXL-EmptyLatentImage",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/shingo1228/ComfyUI-SDXL-EmptyLatentImage"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Nodes:SDXL Empty Latent Image. An extension node for ComfyUI that allows you to select a resolution from the pre-defined json files and output a Latent Image."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "chaunceyyann",
|
||||||
|
"title": "ComfyUI Image Processing Nodes [REMOVED]",
|
||||||
|
"reference": "https://github.com/chaunceyyann/comfyui-image-processing-nodes",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/chaunceyyann/comfyui-image-processing-nodes"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "A collection of custom nodes for ComfyUI focused on image processing operations."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "OgreLemonSoup",
|
||||||
|
"title": "Gallery&Tabs [DEPRECATED]",
|
||||||
|
"id": "LoadImageGallery",
|
||||||
|
"reference": "https://github.com/OgreLemonSoup/ComfyUI-Load-Image-Gallery",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/OgreLemonSoup/ComfyUI-Load-Image-Gallery"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Adds a gallery to the Load Image node and tabs for Load Checkpoint/Lora/etc nodes"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "11dogzi",
|
||||||
|
"title": "Qwen-Image ComfyUI [REMOVED]",
|
||||||
|
"reference": "https://github.com/11dogzi/Comfyui-Qwen-Image",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/11dogzi/Comfyui-Qwen-Image"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "This is a custom node package that integrates the Qwen-Image model into ComfyUI."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "BAIS1C",
|
||||||
|
"title": "ComfyUI-AudioDuration [REMOVED]",
|
||||||
|
"reference": "https://github.com/BAIS1C/ComfyUI_BASICDancePoser",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/BAIS1C/ComfyUI_BASICDancePoser"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Node to extract Dance poses from Music to control Video Generations.\nNOTE: The files in the repo are not organized."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "BAIS1C",
|
||||||
|
"title": "ComfyUI_BASICSAdvancedDancePoser [REMOVED]",
|
||||||
|
"reference": "https://github.com/BAIS1C/ComfyUI_BASICSAdvancedDancePoser",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/BAIS1C/ComfyUI_BASICSAdvancedDancePoser"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Professional COCO-WholeBody 133-keypoint dance animation system for ComfyUI"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "fablestudio",
|
"author": "fablestudio",
|
||||||
"title": "ComfyUI-Showrunner-Utils [REMOVED]",
|
"title": "ComfyUI-Showrunner-Utils [REMOVED]",
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,106 @@
|
|||||||
{
|
{
|
||||||
"models": [
|
"models": [
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v high noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 i2v low noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for i2v low noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v high noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v high noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
|
||||||
|
"size": "28.6GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 t2v low noise 14B (fp8_scaled)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for t2v low noise 14B (fp8_scaled)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
|
||||||
|
"size": "14.3GB"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Comfy-Org/Wan2.2 ti2v 5B (fp16)",
|
||||||
|
"type": "diffusion_model",
|
||||||
|
"base": "Wan2.2",
|
||||||
|
"save_path": "diffusion_models/Wan2.2",
|
||||||
|
"description": "Wan2.2 diffusion model for ti2v 5B (fp16)",
|
||||||
|
"reference": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
|
||||||
|
"filename": "wan2.2_ti2v_5B_fp16.safetensors",
|
||||||
|
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
|
||||||
|
"size": "10.0GB"
|
||||||
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
"name": "sam2.1_hiera_tiny.pt",
|
"name": "sam2.1_hiera_tiny.pt",
|
||||||
"type": "sam2.1",
|
"type": "sam2.1",
|
||||||
@@ -586,109 +687,6 @@
|
|||||||
"filename": "llava_llama3_fp16.safetensors",
|
"filename": "llava_llama3_fp16.safetensors",
|
||||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp16.safetensors",
|
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp16.safetensors",
|
||||||
"size": "16.1GB"
|
"size": "16.1GB"
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "PixArt-Sigma-XL-2-512-MS.safetensors (diffusion)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "pixart-sigma",
|
|
||||||
"save_path": "diffusion_models/PixArt-Sigma",
|
|
||||||
"description": "PixArt-Sigma Diffusion model",
|
|
||||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS",
|
|
||||||
"filename": "PixArt-Sigma-XL-2-512-MS.safetensors",
|
|
||||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-512-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
|
||||||
"size": "2.44GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "PixArt-Sigma-XL-2-1024-MS.safetensors (diffusion)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "pixart-sigma",
|
|
||||||
"save_path": "diffusion_models/PixArt-Sigma",
|
|
||||||
"description": "PixArt-Sigma Diffusion model",
|
|
||||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS",
|
|
||||||
"filename": "PixArt-Sigma-XL-2-1024-MS.safetensors",
|
|
||||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
|
||||||
"size": "2.44GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "PixArt-XL-2-1024-MS.safetensors (diffusion)",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "pixart-alpha",
|
|
||||||
"save_path": "diffusion_models/PixArt-Alpha",
|
|
||||||
"description": "PixArt-Alpha Diffusion model",
|
|
||||||
"reference": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS",
|
|
||||||
"filename": "PixArt-XL-2-1024-MS.safetensors",
|
|
||||||
"url": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
|
|
||||||
"size": "2.45GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/hunyuan_video_t2v_720p_bf16.safetensors",
|
|
||||||
"type": "diffusion_model",
|
|
||||||
"base": "Hunyuan Video",
|
|
||||||
"save_path": "diffusion_models/hunyuan_video",
|
|
||||||
"description": "Huyuan Video diffusion model. repackaged version.",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
|
|
||||||
"filename": "hunyuan_video_t2v_720p_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors",
|
|
||||||
"size": "25.6GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Comfy-Org/hunyuan_video_vae_bf16.safetensors",
|
|
||||||
"type": "VAE",
|
|
||||||
"base": "Hunyuan Video",
|
|
||||||
"save_path": "VAE",
|
|
||||||
"description": "Huyuan Video VAE model. repackaged version.",
|
|
||||||
"reference": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged",
|
|
||||||
"filename": "hunyuan_video_vae_bf16.safetensors",
|
|
||||||
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors",
|
|
||||||
"size": "493MB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "LTX-Video 2B v0.9.1 Checkpoint",
|
|
||||||
"type": "checkpoint",
|
|
||||||
"base": "LTX-Video",
|
|
||||||
"save_path": "checkpoints/LTXV",
|
|
||||||
"description": "LTX-Video is the first DiT-based video generation model capable of generating high-quality videos in real-time. It produces 24 FPS videos at a 768x512 resolution faster than they can be watched. Trained on a large-scale dataset of diverse videos, the model generates high-resolution videos with realistic and varied content.",
|
|
||||||
"reference": "https://huggingface.co/Lightricks/LTX-Video",
|
|
||||||
"filename": "ltx-video-2b-v0.9.1.safetensors",
|
|
||||||
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.1.safetensors",
|
|
||||||
"size": "5.72GB"
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-canny-controlnet-v3.safetensors",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/controlnets",
|
|
||||||
"description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
|
|
||||||
"filename": "flux-canny-controlnet-v3.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-canny-controlnet-v3.safetensors",
|
|
||||||
"size": "1.49GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-depth-controlnet-v3.safetensors",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/controlnets",
|
|
||||||
"description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
|
|
||||||
"filename": "flux-depth-controlnet-v3.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-depth-controlnet-v3.safetensors",
|
|
||||||
"size": "1.49GB"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "XLabs-AI/flux-hed-controlnet-v3.safetensors",
|
|
||||||
"type": "controlnet",
|
|
||||||
"base": "FLUX.1",
|
|
||||||
"save_path": "xlabs/controlnets",
|
|
||||||
"description": "ControlNet checkpoints for FLUX.1-dev model by Black Forest Labs.",
|
|
||||||
"reference": "https://huggingface.co/XLabs-AI/flux-controlnet-collections",
|
|
||||||
"filename": "flux-hed-controlnet-v3.safetensors",
|
|
||||||
"url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
|
|
||||||
"size": "1.49GB"
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,6 +10,16 @@
|
|||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A minimal template for creating React/TypeScript frontend extensions for ComfyUI, with complete boilerplate setup including internationalization and unit testing."
|
"description": "A minimal template for creating React/TypeScript frontend extensions for ComfyUI, with complete boilerplate setup including internationalization and unit testing."
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"author": "comfyui-wiki",
|
||||||
|
"title": "ComfyUI-i18n-demo",
|
||||||
|
"reference": "https://github.com/comfyui-wiki/ComfyUI-i18n-demo",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/comfyui-wiki/ComfyUI-i18n-demo"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "ComfyUI custom node develop i18n support demo "
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"author": "Suzie1",
|
"author": "Suzie1",
|
||||||
"title": "Guide To Making Custom Nodes in ComfyUI",
|
"title": "Guide To Making Custom Nodes in ComfyUI",
|
||||||
@@ -341,6 +351,16 @@
|
|||||||
],
|
],
|
||||||
"install_type": "git-clone",
|
"install_type": "git-clone",
|
||||||
"description": "A minimal test suite demonstrating how remote COMBO inputs behave in ComfyUI, with and without force_input"
|
"description": "A minimal test suite demonstrating how remote COMBO inputs behave in ComfyUI, with and without force_input"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"author": "J1mB091",
|
||||||
|
"title": "ComfyUI-J1mB091 Custom Nodes",
|
||||||
|
"reference": "https://github.com/J1mB091/ComfyUI-J1mB091",
|
||||||
|
"files": [
|
||||||
|
"https://github.com/J1mB091/ComfyUI-J1mB091"
|
||||||
|
],
|
||||||
|
"install_type": "git-clone",
|
||||||
|
"description": "Vibe Coded ComfyUI Custom Nodes"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
373
notebooks/comfyui_colab_with_manager.ipynb
Normal file
373
notebooks/comfyui_colab_with_manager.ipynb
Normal file
@@ -0,0 +1,373 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "aaaaaaaaaa"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"Git clone the repo and install the requirements. (ignore the pip errors about protobuf)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"id": "bbbbbbbbbb"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"# #@title Environment Setup\n",
|
||||||
|
"\n",
|
||||||
|
"from pathlib import Path\n",
|
||||||
|
"\n",
|
||||||
|
"OPTIONS = {}\n",
|
||||||
|
"\n",
|
||||||
|
"USE_GOOGLE_DRIVE = True #@param {type:\"boolean\"}\n",
|
||||||
|
"UPDATE_COMFY_UI = True #@param {type:\"boolean\"}\n",
|
||||||
|
"USE_COMFYUI_MANAGER = True #@param {type:\"boolean\"}\n",
|
||||||
|
"INSTALL_CUSTOM_NODES_DEPENDENCIES = True #@param {type:\"boolean\"}\n",
|
||||||
|
"OPTIONS['USE_GOOGLE_DRIVE'] = USE_GOOGLE_DRIVE\n",
|
||||||
|
"OPTIONS['UPDATE_COMFY_UI'] = UPDATE_COMFY_UI\n",
|
||||||
|
"OPTIONS['USE_COMFYUI_MANAGER'] = USE_COMFYUI_MANAGER\n",
|
||||||
|
"OPTIONS['INSTALL_CUSTOM_NODES_DEPENDENCIES'] = INSTALL_CUSTOM_NODES_DEPENDENCIES\n",
|
||||||
|
"\n",
|
||||||
|
"current_dir = !pwd\n",
|
||||||
|
"WORKSPACE = f\"{current_dir[0]}/ComfyUI\"\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['USE_GOOGLE_DRIVE']:\n",
|
||||||
|
" !echo \"Mounting Google Drive...\"\n",
|
||||||
|
" %cd /\n",
|
||||||
|
"\n",
|
||||||
|
" from google.colab import drive\n",
|
||||||
|
" drive.mount('/content/drive')\n",
|
||||||
|
"\n",
|
||||||
|
" WORKSPACE = \"/content/drive/MyDrive/ComfyUI\"\n",
|
||||||
|
" %cd /content/drive/MyDrive\n",
|
||||||
|
"\n",
|
||||||
|
"![ ! -d $WORKSPACE ] && echo -= Initial setup ComfyUI =- && git clone https://github.com/comfyanonymous/ComfyUI\n",
|
||||||
|
"%cd $WORKSPACE\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['UPDATE_COMFY_UI']:\n",
|
||||||
|
" !echo -= Updating ComfyUI =-\n",
|
||||||
|
"\n",
|
||||||
|
" # Correction of the issue of permissions being deleted on Google Drive.\n",
|
||||||
|
" ![ -f \".ci/nightly/update_windows/update_comfyui_and_python_dependencies.bat\" ] && chmod 755 .ci/nightly/update_windows/update_comfyui_and_python_dependencies.bat\n",
|
||||||
|
" ![ -f \".ci/nightly/windows_base_files/run_nvidia_gpu.bat\" ] && chmod 755 .ci/nightly/windows_base_files/run_nvidia_gpu.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/update_comfyui_and_python_dependencies.bat\" ] && chmod 755 .ci/update_windows/update_comfyui_and_python_dependencies.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows_cu118/update_comfyui_and_python_dependencies.bat\" ] && chmod 755 .ci/update_windows_cu118/update_comfyui_and_python_dependencies.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/update.py\" ] && chmod 755 .ci/update_windows/update.py\n",
|
||||||
|
" ![ -f \".ci/update_windows/update_comfyui.bat\" ] && chmod 755 .ci/update_windows/update_comfyui.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/README_VERY_IMPORTANT.txt\" ] && chmod 755 .ci/update_windows/README_VERY_IMPORTANT.txt\n",
|
||||||
|
" ![ -f \".ci/update_windows/run_cpu.bat\" ] && chmod 755 .ci/update_windows/run_cpu.bat\n",
|
||||||
|
" ![ -f \".ci/update_windows/run_nvidia_gpu.bat\" ] && chmod 755 .ci/update_windows/run_nvidia_gpu.bat\n",
|
||||||
|
"\n",
|
||||||
|
" !git pull\n",
|
||||||
|
"\n",
|
||||||
|
"!echo -= Install dependencies =-\n",
|
||||||
|
"!pip3 install accelerate\n",
|
||||||
|
"!pip3 install einops transformers>=4.28.1 safetensors>=0.4.2 aiohttp pyyaml Pillow scipy tqdm psutil tokenizers>=0.13.3\n",
|
||||||
|
"!pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu121\n",
|
||||||
|
"!pip3 install torchsde\n",
|
||||||
|
"!pip3 install kornia>=0.7.1 spandrel soundfile sentencepiece\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['USE_COMFYUI_MANAGER']:\n",
|
||||||
|
" %cd custom_nodes\n",
|
||||||
|
"\n",
|
||||||
|
" # Correction of the issue of permissions being deleted on Google Drive.\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/check.sh\" ] && chmod 755 ComfyUI-Manager/check.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/scan.sh\" ] && chmod 755 ComfyUI-Manager/scan.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/node_db/dev/scan.sh\" ] && chmod 755 ComfyUI-Manager/node_db/dev/scan.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/node_db/tutorial/scan.sh\" ] && chmod 755 ComfyUI-Manager/node_db/tutorial/scan.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/scripts/install-comfyui-venv-linux.sh\" ] && chmod 755 ComfyUI-Manager/scripts/install-comfyui-venv-linux.sh\n",
|
||||||
|
" ![ -f \"ComfyUI-Manager/scripts/install-comfyui-venv-win.bat\" ] && chmod 755 ComfyUI-Manager/scripts/install-comfyui-venv-win.bat\n",
|
||||||
|
"\n",
|
||||||
|
" ![ ! -d ComfyUI-Manager ] && echo -= Initial setup ComfyUI-Manager =- && git clone https://github.com/ltdrdata/ComfyUI-Manager\n",
|
||||||
|
" %cd ComfyUI-Manager\n",
|
||||||
|
" !git pull\n",
|
||||||
|
"\n",
|
||||||
|
"%cd $WORKSPACE\n",
|
||||||
|
"\n",
|
||||||
|
"if OPTIONS['INSTALL_CUSTOM_NODES_DEPENDENCIES']:\n",
|
||||||
|
" !echo -= Install custom nodes dependencies =-\n",
|
||||||
|
" !pip install GitPython\n",
|
||||||
|
" !python custom_nodes/ComfyUI-Manager/cm-cli.py restore-dependencies\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "cccccccccc"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"Download some models/checkpoints/vae or custom comfyui nodes (uncomment the commands for the ones you want)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"id": "dddddddddd"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"# Checkpoints\n",
|
||||||
|
"\n",
|
||||||
|
"### SDXL\n",
|
||||||
|
"### I recommend these workflow examples: https://comfyanonymous.github.io/ComfyUI_examples/sdxl/\n",
|
||||||
|
"\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"\n",
|
||||||
|
"# SDXL ReVision\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors -P ./models/clip_vision/\n",
|
||||||
|
"\n",
|
||||||
|
"# SD1.5\n",
|
||||||
|
"!wget -c https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.ckpt -P ./models/checkpoints/\n",
|
||||||
|
"\n",
|
||||||
|
"# SD2\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-2-1-base/resolve/main/v2-1_512-ema-pruned.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"\n",
|
||||||
|
"# Some SD1.5 anime style\n",
|
||||||
|
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix2/AbyssOrangeMix2_hard.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix3/AOM3A1_orangemixs.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix3/AOM3A3_orangemixs.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"#!wget -c https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/anything-v3-fp16-pruned.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"\n",
|
||||||
|
"# Waifu Diffusion 1.5 (anime style SD2.x 768-v)\n",
|
||||||
|
"#!wget -c https://huggingface.co/waifu-diffusion/wd-1-5-beta3/resolve/main/wd-illusion-fp16.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# unCLIP models\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/illuminatiDiffusionV1_v11_unCLIP/resolve/main/illuminatiDiffusionV1_v11-unclip-h-fp16.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/wd-1.5-beta2_unCLIP/resolve/main/wd-1-5-beta2-aesthetic-unclip-h-fp16.safetensors -P ./models/checkpoints/\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# VAE\n",
|
||||||
|
"!wget -c https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors -P ./models/vae/\n",
|
||||||
|
"#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/VAEs/orangemix.vae.pt -P ./models/vae/\n",
|
||||||
|
"#!wget -c https://huggingface.co/hakurei/waifu-diffusion-v1-4/resolve/main/vae/kl-f8-anime2.ckpt -P ./models/vae/\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# Loras\n",
|
||||||
|
"#!wget -c https://civitai.com/api/download/models/10350 -O ./models/loras/theovercomer8sContrastFix_sd21768.safetensors #theovercomer8sContrastFix SD2.x 768-v\n",
|
||||||
|
"#!wget -c https://civitai.com/api/download/models/10638 -O ./models/loras/theovercomer8sContrastFix_sd15.safetensors #theovercomer8sContrastFix SD1.x\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors -P ./models/loras/ #SDXL offset noise lora\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# T2I-Adapter\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_seg_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_sketch_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_keypose_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_openpose_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_color_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_canny_sd14v1.pth -P ./models/controlnet/\n",
|
||||||
|
"\n",
|
||||||
|
"# T2I Styles Model\n",
|
||||||
|
"#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_style_sd14v1.pth -P ./models/style_models/\n",
|
||||||
|
"\n",
|
||||||
|
"# CLIPVision model (needed for styles model)\n",
|
||||||
|
"#!wget -c https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/pytorch_model.bin -O ./models/clip_vision/clip_vit14.bin\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# ControlNet\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_canny_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_lineart_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_seg_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_softedge_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11u_sd15_tile_fp16.safetensors -P ./models/controlnet/\n",
|
||||||
|
"\n",
|
||||||
|
"# ControlNet SDXL\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-canny-rank256.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-depth-rank256.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-recolor-rank256.safetensors -P ./models/controlnet/\n",
|
||||||
|
"#!wget -c https://huggingface.co/stabilityai/control-lora/resolve/main/control-LoRAs-rank256/control-lora-sketch-rank256.safetensors -P ./models/controlnet/\n",
|
||||||
|
"\n",
|
||||||
|
"# Controlnet Preprocessor nodes by Fannovel16\n",
|
||||||
|
"#!cd custom_nodes && git clone https://github.com/Fannovel16/comfy_controlnet_preprocessors; cd comfy_controlnet_preprocessors && python install.py\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# GLIGEN\n",
|
||||||
|
"#!wget -c https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/resolve/main/gligen_sd14_textbox_pruned_fp16.safetensors -P ./models/gligen/\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# ESRGAN upscale model\n",
|
||||||
|
"#!wget -c https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth -P ./models/upscale_models/\n",
|
||||||
|
"#!wget -c https://huggingface.co/sberbank-ai/Real-ESRGAN/resolve/main/RealESRGAN_x2.pth -P ./models/upscale_models/\n",
|
||||||
|
"#!wget -c https://huggingface.co/sberbank-ai/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth -P ./models/upscale_models/\n",
|
||||||
|
"\n",
|
||||||
|
"\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "kkkkkkkkkkkkkkk"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"### Run ComfyUI with cloudflared (Recommended Way)\n",
|
||||||
|
"\n",
|
||||||
|
"\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"id": "jjjjjjjjjjjjjj"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"!wget -P ~ https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64.deb\n",
|
||||||
|
"!dpkg -i ~/cloudflared-linux-amd64.deb\n",
|
||||||
|
"\n",
|
||||||
|
"import subprocess\n",
|
||||||
|
"import threading\n",
|
||||||
|
"import time\n",
|
||||||
|
"import socket\n",
|
||||||
|
"import urllib.request\n",
|
||||||
|
"\n",
|
||||||
|
"def iframe_thread(port):\n",
|
||||||
|
" while True:\n",
|
||||||
|
" time.sleep(0.5)\n",
|
||||||
|
" sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
|
||||||
|
" result = sock.connect_ex(('127.0.0.1', port))\n",
|
||||||
|
" if result == 0:\n",
|
||||||
|
" break\n",
|
||||||
|
" sock.close()\n",
|
||||||
|
" print(\"\\nComfyUI finished loading, trying to launch cloudflared (if it gets stuck here cloudflared is having issues)\\n\")\n",
|
||||||
|
"\n",
|
||||||
|
" p = subprocess.Popen([\"cloudflared\", \"tunnel\", \"--url\", \"http://127.0.0.1:{}\".format(port)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n",
|
||||||
|
" for line in p.stderr:\n",
|
||||||
|
" l = line.decode()\n",
|
||||||
|
" if \"trycloudflare.com \" in l:\n",
|
||||||
|
" print(\"This is the URL to access ComfyUI:\", l[l.find(\"http\"):], end='')\n",
|
||||||
|
" #print(l, end='')\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
|
||||||
|
"\n",
|
||||||
|
"!python main.py --dont-print-server"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "kkkkkkkkkkkkkk"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"### Run ComfyUI with localtunnel\n",
|
||||||
|
"\n",
|
||||||
|
"\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"id": "jjjjjjjjjjjjj"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"!npm install -g localtunnel\n",
|
||||||
|
"\n",
|
||||||
|
"import subprocess\n",
|
||||||
|
"import threading\n",
|
||||||
|
"import time\n",
|
||||||
|
"import socket\n",
|
||||||
|
"import urllib.request\n",
|
||||||
|
"\n",
|
||||||
|
"def iframe_thread(port):\n",
|
||||||
|
" while True:\n",
|
||||||
|
" time.sleep(0.5)\n",
|
||||||
|
" sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
|
||||||
|
" result = sock.connect_ex(('127.0.0.1', port))\n",
|
||||||
|
" if result == 0:\n",
|
||||||
|
" break\n",
|
||||||
|
" sock.close()\n",
|
||||||
|
" print(\"\\nComfyUI finished loading, trying to launch localtunnel (if it gets stuck here localtunnel is having issues)\\n\")\n",
|
||||||
|
"\n",
|
||||||
|
" print(\"The password/enpoint ip for localtunnel is:\", urllib.request.urlopen('https://ipv4.icanhazip.com').read().decode('utf8').strip(\"\\n\"))\n",
|
||||||
|
" p = subprocess.Popen([\"lt\", \"--port\", \"{}\".format(port)], stdout=subprocess.PIPE)\n",
|
||||||
|
" for line in p.stdout:\n",
|
||||||
|
" print(line.decode(), end='')\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
|
||||||
|
"\n",
|
||||||
|
"!python main.py --dont-print-server"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {
|
||||||
|
"id": "gggggggggg"
|
||||||
|
},
|
||||||
|
"source": [
|
||||||
|
"### Run ComfyUI with colab iframe (use only in case the previous way with localtunnel doesn't work)\n",
|
||||||
|
"\n",
|
||||||
|
"You should see the ui appear in an iframe. If you get a 403 error, it's your firefox settings or an extension that's messing things up.\n",
|
||||||
|
"\n",
|
||||||
|
"If you want to open it in another window use the link.\n",
|
||||||
|
"\n",
|
||||||
|
"Note that some UI features like live image previews won't work because the colab iframe blocks websockets."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {
|
||||||
|
"id": "hhhhhhhhhh"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import threading\n",
|
||||||
|
"import time\n",
|
||||||
|
"import socket\n",
|
||||||
|
"def iframe_thread(port):\n",
|
||||||
|
" while True:\n",
|
||||||
|
" time.sleep(0.5)\n",
|
||||||
|
" sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
|
||||||
|
" result = sock.connect_ex(('127.0.0.1', port))\n",
|
||||||
|
" if result == 0:\n",
|
||||||
|
" break\n",
|
||||||
|
" sock.close()\n",
|
||||||
|
" from google.colab import output\n",
|
||||||
|
" output.serve_kernel_port_as_iframe(port, height=1024)\n",
|
||||||
|
" print(\"to open it in a window you can open this link here:\")\n",
|
||||||
|
" output.serve_kernel_port_as_window(port)\n",
|
||||||
|
"\n",
|
||||||
|
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
|
||||||
|
"\n",
|
||||||
|
"!python main.py --dont-print-server"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"accelerator": "GPU",
|
||||||
|
"colab": {
|
||||||
|
"provenance": []
|
||||||
|
},
|
||||||
|
"gpuClass": "standard",
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"name": "python"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 0
|
||||||
|
}
|
||||||
1569
openapi.yaml
1569
openapi.yaml
File diff suppressed because it is too large
Load Diff
@@ -12,10 +12,13 @@ import ast
|
|||||||
import logging
|
import logging
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from .common import security_check
|
glob_path = os.path.join(os.path.dirname(__file__), "glob")
|
||||||
from .common import manager_util
|
sys.path.append(glob_path)
|
||||||
from .common import cm_global
|
|
||||||
from .common import manager_downloader
|
import security_check
|
||||||
|
import manager_util
|
||||||
|
import cm_global
|
||||||
|
import manager_downloader
|
||||||
import folder_paths
|
import folder_paths
|
||||||
|
|
||||||
manager_util.add_python_path_to_env()
|
manager_util.add_python_path_to_env()
|
||||||
@@ -63,14 +66,16 @@ def is_import_failed_extension(name):
 comfy_path = os.environ.get('COMFYUI_PATH')
 comfy_base_path = os.environ.get('COMFYUI_FOLDERS_BASE_PATH')
 
+if comfy_path is None:
+    # legacy env var
+    comfy_path = os.environ.get('COMFYUI_PATH')
+
 if comfy_path is None:
     comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__))
-    os.environ['COMFYUI_PATH'] = comfy_path
-
 if comfy_base_path is None:
     comfy_base_path = comfy_path
 
 
 sys.__comfyui_manager_register_message_collapse = register_message_collapse
 sys.__comfyui_manager_is_import_failed_extension = is_import_failed_extension
 cm_global.register_api('cm.register_message_collapse', register_message_collapse)
@@ -86,6 +91,9 @@ manager_pip_blacklist_path = os.path.join(manager_files_path, "pip_blacklist.lis
 restore_snapshot_path = os.path.join(manager_files_path, "startup-scripts", "restore-snapshot.json")
 manager_config_path = os.path.join(manager_files_path, 'config.ini')
 
+cm_cli_path = os.path.join(comfyui_manager_path, "cm-cli.py")
+
+
 default_conf = {}
 
 def read_config():
@@ -392,11 +400,7 @@ try:
         def emit(self, record):
             global is_start_mode
 
-            try:
-                message = record.getMessage()
-            except Exception as e:
-                message = f"<<logging error>>: {record} - {e}"
-            original_stderr.write(message)
+            message = record.getMessage()
 
             if is_start_mode:
                 match = re.search(pat_import_fail, message)
@@ -439,6 +443,35 @@ except Exception as e:
     print(f"[ComfyUI-Manager] Logging failed: {e}")
 
 
+def ensure_dependencies():
+    try:
+        import git  # noqa: F401
+        import toml  # noqa: F401
+        import rich  # noqa: F401
+        import chardet  # noqa: F401
+    except ModuleNotFoundError:
+        my_path = os.path.dirname(__file__)
+        requirements_path = os.path.join(my_path, "requirements.txt")
+
+        print("## ComfyUI-Manager: installing dependencies. (GitPython)")
+        try:
+            subprocess.check_output(manager_util.make_pip_cmd(['install', '-r', requirements_path]))
+        except subprocess.CalledProcessError:
+            print("## [ERROR] ComfyUI-Manager: Attempting to reinstall dependencies using an alternative method.")
+            try:
+                subprocess.check_output(manager_util.make_pip_cmd(['install', '--user', '-r', requirements_path]))
+            except subprocess.CalledProcessError:
+                print("## [ERROR] ComfyUI-Manager: Failed to install the GitPython package in the correct Python environment. Please install it manually in the appropriate environment. (You can seek help at https://app.element.io/#/room/%23comfyui_space%3Amatrix.org)")
+
+        try:
+            print("## ComfyUI-Manager: installing dependencies done.")
+        except:
+            # maybe we should sys.exit() here? there is at least two screens worth of error messages still being pumped after our error messages
+            print("## [ERROR] ComfyUI-Manager: GitPython package seems to be installed, but failed to load somehow. Make sure you have a working git client installed")
+
+ensure_dependencies()
+
+
 print("** ComfyUI startup time:", current_timestamp())
 print("** Platform:", platform.system())
 print("** Python version:", sys.version)
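The added ensure_dependencies() relies on manager_util.make_pip_cmd, whose implementation is not part of this diff. A minimal sketch of what such a helper typically does (an illustrative assumption, not the actual manager_util code) is to build a pip command bound to the currently running interpreter:

import sys

def make_pip_cmd(args):
    # Assumption: prefix the pip arguments with the running interpreter so the
    # install lands in the same environment that ComfyUI was started from.
    return [sys.executable, '-m', 'pip'] + list(args)

# e.g. make_pip_cmd(['install', '-r', 'requirements.txt'])
# -> ['/usr/bin/python3', '-m', 'pip', 'install', '-r', 'requirements.txt']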
@@ -462,7 +495,7 @@ def read_downgrade_blacklist():
             items = [x.strip() for x in items if x != '']
             cm_global.pip_downgrade_blacklist += items
             cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
-    except Exception:
+    except:
         pass
 
 
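Several hunks in this comparison differ only in `except Exception:` versus a bare `except:`. The practical difference is that a bare except also traps BaseException subclasses such as KeyboardInterrupt and SystemExit; a small standalone illustration:

def risky():
    raise KeyboardInterrupt

# `except Exception:` lets KeyboardInterrupt pass through (it derives from BaseException).
try:
    risky()
except Exception:
    print("not reached")
except BaseException:
    print("KeyboardInterrupt skipped the `except Exception` clause")

# A bare `except:` catches it.
try:
    risky()
except:
    print("bare except caught KeyboardInterrupt")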
@@ -568,10 +601,7 @@ if os.path.exists(restore_snapshot_path):
         if 'COMFYUI_FOLDERS_BASE_PATH' not in new_env:
             new_env["COMFYUI_FOLDERS_BASE_PATH"] = comfy_path
 
-        if 'COMFYUI_PATH' not in new_env:
-            new_env['COMFYUI_PATH'] = os.path.dirname(folder_paths.__file__)
-
-        cmd_str = [sys.executable, '-m', 'comfyui_manager.cm_cli', 'restore-snapshot', restore_snapshot_path]
+        cmd_str = [sys.executable, cm_cli_path, 'restore-snapshot', restore_snapshot_path]
         exit_code = process_wrap(cmd_str, custom_nodes_base_path, handler=msg_capture, env=new_env)
 
         if exit_code != 0:
@@ -1,70 +1,15 @@
-[build-system]
-requires = ["setuptools >= 61.0"]
-build-backend = "setuptools.build_meta"
-
 [project]
 name = "comfyui-manager"
-license = { text = "GPL-3.0-only" }
-version = "5.0b1"
-requires-python = ">= 3.9"
 description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
-readme = "README.md"
-keywords = ["comfyui", "comfyui-manager"]
-maintainers = [
-    { name = "Dr.Lt.Data", email = "dr.lt.data@gmail.com" },
-    { name = "Yoland Yan", email = "yoland@comfy.org" },
-    { name = "James Kwon", email = "hongilkwon316@gmail.com" },
-    { name = "Robin Huang", email = "robin@comfy.org" },
-]
-
-classifiers = [
-    "Development Status :: 5 - Production/Stable",
-    "Intended Audience :: Developers",
-    "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
-]
-
-dependencies = [
-    "GitPython",
-    "PyGithub",
-    # "matrix-nio",
-    "transformers",
-    "huggingface-hub>0.20",
-    "typer",
-    "rich",
-    "typing-extensions",
-    "toml",
-    "uv",
-    "chardet"
-]
-
-[project.optional-dependencies]
-dev = ["pre-commit", "pytest", "ruff", "pytest-cov"]
+version = "3.37"
+license = { file = "LICENSE.txt" }
+dependencies = ["GitPython", "PyGithub", "matrix-nio", "transformers", "huggingface-hub>0.20", "typer", "rich", "typing-extensions", "toml", "uv", "chardet"]
 
 [project.urls]
 Repository = "https://github.com/ltdrdata/ComfyUI-Manager"
+# Used by Comfy Registry https://comfyregistry.org
 
-[tool.setuptools.packages.find]
-where = ["."]
-include = ["comfyui_manager*"]
-
-[project.scripts]
-cm-cli = "comfyui_manager.cm_cli.__main__:main"
-
-[tool.ruff]
-line-length = 120
-target-version = "py39"
-
-[tool.ruff.lint]
-select = [
-    "E4", # default
-    "E7", # default
-    "E9", # default
-    "F", # default
-    "I", # isort-like behavior (import statement sorting)
-]
-
-[tool.pytest.ini_options]
-markers = [
-    "integration: marks tests as integration tests (deselect with '-m \"not integration\"')",
-]
+[tool.comfy]
+PublisherId = "drltdata"
+DisplayName = "ComfyUI-Manager"
+Icon = ""
@@ -1,6 +1,6 @@
 GitPython
 PyGithub
-# matrix-nio
+matrix-nio
 transformers
 huggingface-hub>0.20
 typer
@@ -9,4 +9,4 @@ lint.select = [
     "F",
 ]
 
-exclude = ["*.ipynb", "tests"]
+exclude = ["*.ipynb"]
@@ -94,7 +94,7 @@ def extract_nodes(code_text):
             return s
         else:
             return set()
-    except Exception:
+    except:
         return set()
 
 
@@ -396,7 +396,7 @@ def update_custom_nodes():
 
         try:
             download_url(url, temp_dir)
-        except Exception:
+        except:
             print(f"[ERROR] Cannot download '{url}'")
 
     with concurrent.futures.ThreadPoolExecutor(10) as executor:
39
scripts/colab-dependencies.py
Normal file
@@ -0,0 +1,39 @@
import os
import subprocess


def get_enabled_subdirectories_with_files(base_directory):
    subdirs_with_files = []
    for subdir in os.listdir(base_directory):
        try:
            full_path = os.path.join(base_directory, subdir)
            if os.path.isdir(full_path) and not subdir.endswith(".disabled") and not subdir.startswith('.') and subdir != '__pycache__':
                print(f"## Install dependencies for '{subdir}'")
                requirements_file = os.path.join(full_path, "requirements.txt")
                install_script = os.path.join(full_path, "install.py")

                if os.path.exists(requirements_file) or os.path.exists(install_script):
                    subdirs_with_files.append((full_path, requirements_file, install_script))
        except Exception as e:
            print(f"EXCEPTION During Dependencies INSTALL on '{subdir}':\n{e}")

    return subdirs_with_files


def install_requirements(requirements_file_path):
    if os.path.exists(requirements_file_path):
        subprocess.run(["pip", "install", "-r", requirements_file_path])


def run_install_script(install_script_path):
    if os.path.exists(install_script_path):
        subprocess.run(["python", install_script_path])


custom_nodes_directory = "custom_nodes"
subdirs_with_files = get_enabled_subdirectories_with_files(custom_nodes_directory)


for subdir, requirements_file, install_script in subdirs_with_files:
    install_requirements(requirements_file)
    run_install_script(install_script)
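This new script is presumably meant to be run from the ComfyUI root, i.e. the directory containing custom_nodes/ (for example from a Colab cell). A minimal illustrative invocation, with the working directory assumed rather than taken from this diff:

import subprocess

# Install dependencies for every enabled custom node; assumes the current
# working directory is the ComfyUI checkout that contains custom_nodes/.
subprocess.run(["python", "scripts/colab-dependencies.py"], check=True)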
Some files were not shown because too many files have changed in this diff