7 Commits

Author SHA1 Message Date
Hayden
6d67b00b17 Fix publishing failed (#69) 2024-11-29 09:40:25 +08:00
Hayden
cda24405b5 prepare release 2.1.3 2024-11-28 13:03:06 +08:00
Hayden
6fa90be8c4 Fix preview path (#66) 2024-11-28 13:02:36 +08:00
Hayden
5a28789af7 prepare release 2.1.2 2024-11-28 12:07:45 +08:00
Hayden
dada903b2b Feature scan extra folders (#65)
* scan extra folders

Other extensions may add a models folder in folder_paths

* Fix scanned non-model files

Model file suffix specified
2024-11-28 12:04:23 +08:00
Hayden
e8916307aa Skip hidden model files (#64) 2024-11-28 12:01:55 +08:00
Hayden
8b6c6ebdea Fix some minor bug (#62)
* Fix print info

* Delete empty line
2024-11-25 15:58:18 +08:00
6 changed files with 71 additions and 64 deletions

View File

@@ -23,7 +23,6 @@ if len(uninstalled_package) > 0:
# Init config settings # Init config settings
config.extension_uri = extension_uri config.extension_uri = extension_uri
utils.resolve_model_base_paths()
version = utils.get_current_version() version = utils.get_current_version()
utils.download_web_distribution(version) utils.download_web_distribution(version)
@@ -95,7 +94,7 @@ async def get_model_paths(request):
""" """
Returns the base folders for models. Returns the base folders for models.
""" """
model_base_paths = config.model_base_paths model_base_paths = utils.resolve_model_base_paths()
return web.json_response({"success": True, "data": model_base_paths}) return web.json_response({"success": True, "data": model_base_paths})
@@ -130,7 +129,7 @@ async def read_models(request):
Scan all models and read their information. Scan all models and read their information.
""" """
try: try:
result = services.scan_models() result = services.scan_models(request)
return web.json_response({"success": True, "data": result}) return web.json_response({"success": True, "data": result})
except Exception as e: except Exception as e:
error_msg = f"Read models failed: {str(e)}" error_msg = f"Read models failed: {str(e)}"
@@ -232,7 +231,7 @@ async def download_model_info(request):
post = await utils.get_request_body(request) post = await utils.get_request_body(request)
try: try:
scan_mode = post.get("scanMode", "diff") scan_mode = post.get("scanMode", "diff")
await services.download_model_info(scan_mode) await services.download_model_info(scan_mode, request)
return web.json_response({"success": True}) return web.json_response({"success": True})
except Exception as e: except Exception as e:
error_msg = f"Download model info failed: {str(e)}" error_msg = f"Download model info failed: {str(e)}"
@@ -288,10 +287,10 @@ async def migrate_legacy_information(request):
Migrate legacy information. Migrate legacy information.
""" """
try: try:
await services.migrate_legacy_information() await services.migrate_legacy_information(request)
return web.json_response({"success": True}) return web.json_response({"success": True})
except Exception as e: except Exception as e:
error_msg = f"Download model info failed: {str(e)}" error_msg = f"Migrate model info failed: {str(e)}"
utils.print_error(error_msg) utils.print_error(error_msg)
return web.json_response({"success": False, "error": error_msg}) return web.json_response({"success": False, "error": error_msg})

View File

@@ -1,7 +1,6 @@
extension_tag = "ComfyUI Model Manager" extension_tag = "ComfyUI Model Manager"
extension_uri: str = None extension_uri: str = None
model_base_paths: dict[str, list[str]] = {}
setting_key = { setting_key = {
@@ -12,6 +11,9 @@ setting_key = {
"download": { "download": {
"max_task_count": "ModelManager.Download.MaxTaskCount", "max_task_count": "ModelManager.Download.MaxTaskCount",
}, },
"scan": {
"include_hidden_files": "ModelManager.Scan.IncludeHiddenFiles"
},
} }
user_agent = "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148" user_agent = "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148"

View File

@@ -8,16 +8,16 @@ from . import download
from . import searcher from . import searcher
def scan_models(): def scan_models(request):
result = [] result = []
model_base_paths = config.model_base_paths model_base_paths = utils.resolve_model_base_paths()
for model_type in model_base_paths: for model_type in model_base_paths:
folders, extensions = folder_paths.folder_names_and_paths[model_type] folders, extensions = folder_paths.folder_names_and_paths[model_type]
for path_index, base_path in enumerate(folders): for path_index, base_path in enumerate(folders):
files = utils.recursive_search_files(base_path) files = utils.recursive_search_files(base_path, request)
models = folder_paths.filter_files_extensions(files, extensions) models = folder_paths.filter_files_extensions(files, folder_paths.supported_pt_extensions)
for fullname in models: for fullname in models:
fullname = utils.normalize_path(fullname) fullname = utils.normalize_path(fullname)
@@ -29,14 +29,13 @@ def scan_models():
# Resolve preview # Resolve preview
image_name = utils.get_model_preview_name(abs_path) image_name = utils.get_model_preview_name(abs_path)
image_name = utils.join_path(os.path.dirname(fullname), image_name)
abs_image_path = utils.join_path(base_path, image_name) abs_image_path = utils.join_path(base_path, image_name)
if os.path.isfile(abs_image_path): if os.path.isfile(abs_image_path):
image_state = os.stat(abs_image_path) image_state = os.stat(abs_image_path)
image_timestamp = round(image_state.st_mtime_ns / 1000000) image_timestamp = round(image_state.st_mtime_ns / 1000000)
image_name = f"{image_name}?ts={image_timestamp}" image_name = f"{image_name}?ts={image_timestamp}"
model_preview = ( model_preview = f"/model-manager/preview/{model_type}/{path_index}/{image_name}"
f"/model-manager/preview/{model_type}/{path_index}/{image_name}"
)
model_info = { model_info = {
"fullname": fullname, "fullname": fullname,
@@ -138,16 +137,16 @@ def fetch_model_info(model_page: str):
return result return result
async def download_model_info(scan_mode: str): async def download_model_info(scan_mode: str, request):
utils.print_info(f"Download model info for {scan_mode}") utils.print_info(f"Download model info for {scan_mode}")
model_base_paths = config.model_base_paths model_base_paths = utils.resolve_model_base_paths()
for model_type in model_base_paths: for model_type in model_base_paths:
folders, extensions = folder_paths.folder_names_and_paths[model_type] folders, extensions = folder_paths.folder_names_and_paths[model_type]
for path_index, base_path in enumerate(folders): for path_index, base_path in enumerate(folders):
files = utils.recursive_search_files(base_path) files = utils.recursive_search_files(base_path, request)
models = folder_paths.filter_files_extensions(files, extensions) models = folder_paths.filter_files_extensions(files, folder_paths.supported_pt_extensions)
for fullname in models: for fullname in models:
fullname = utils.normalize_path(fullname) fullname = utils.normalize_path(fullname)
@@ -161,16 +160,8 @@ async def download_model_info(scan_mode: str):
has_preview = os.path.isfile(abs_image_path) has_preview = os.path.isfile(abs_image_path)
description_name = utils.get_model_description_name(abs_model_path) description_name = utils.get_model_description_name(abs_model_path)
abs_description_path = ( abs_description_path = utils.join_path(base_path, description_name) if description_name else None
utils.join_path(base_path, description_name) has_description = os.path.isfile(abs_description_path) if abs_description_path else False
if description_name
else None
)
has_description = (
os.path.isfile(abs_description_path)
if abs_description_path
else False
)
try: try:
@@ -185,46 +176,38 @@ async def download_model_info(scan_mode: str):
utils.print_debug(f"Calculate sha256 for {abs_model_path}") utils.print_debug(f"Calculate sha256 for {abs_model_path}")
hash_value = utils.calculate_sha256(abs_model_path) hash_value = utils.calculate_sha256(abs_model_path)
utils.print_info(f"Searching model info by hash {hash_value}") utils.print_info(f"Searching model info by hash {hash_value}")
model_info = searcher.CivitaiModelSearcher().search_by_hash( model_info = searcher.CivitaiModelSearcher().search_by_hash(hash_value)
hash_value
)
preview_url_list = model_info.get("preview", []) preview_url_list = model_info.get("preview", [])
preview_image_url = ( preview_image_url = preview_url_list[0] if preview_url_list else None
preview_url_list[0] if preview_url_list else None
)
if preview_image_url: if preview_image_url:
utils.print_debug(f"Save preview image to {abs_image_path}") utils.print_debug(f"Save preview image to {abs_image_path}")
utils.save_model_preview_image( utils.save_model_preview_image(abs_model_path, preview_image_url)
abs_model_path, preview_image_url
)
description = model_info.get("description", None) description = model_info.get("description", None)
if description: if description:
utils.save_model_description(abs_model_path, description) utils.save_model_description(abs_model_path, description)
except Exception as e: except Exception as e:
utils.print_error( utils.print_error(f"Failed to download model info for {abs_model_path}: {e}")
f"Failed to download model info for {abs_model_path}: {e}"
)
utils.print_debug("Completed scan model information.") utils.print_debug("Completed scan model information.")
async def migrate_legacy_information(): async def migrate_legacy_information(request):
import json import json
import yaml import yaml
from PIL import Image from PIL import Image
utils.print_info(f"Migrating legacy information...") utils.print_info(f"Migrating legacy information...")
model_base_paths = config.model_base_paths model_base_paths = utils.resolve_model_base_paths()
for model_type in model_base_paths: for model_type in model_base_paths:
folders, extensions = folder_paths.folder_names_and_paths[model_type] folders, extensions = folder_paths.folder_names_and_paths[model_type]
for path_index, base_path in enumerate(folders): for path_index, base_path in enumerate(folders):
files = utils.recursive_search_files(base_path) files = utils.recursive_search_files(base_path, request)
models = folder_paths.filter_files_extensions(files, extensions) models = folder_paths.filter_files_extensions(files, folder_paths.supported_pt_extensions)
for fullname in models: for fullname in models:
fullname = utils.normalize_path(fullname) fullname = utils.normalize_path(fullname)
@@ -290,5 +273,4 @@ async def migrate_legacy_information():
with open(description_path, "w", encoding="utf-8", newline="") as f: with open(description_path, "w", encoding="utf-8", newline="") as f:
f.write("\n".join(description_parts)) f.write("\n".join(description_parts))
utils.print_debug("Completed migrate model information.") utils.print_debug("Completed migrate model information.")

View File

@@ -103,9 +103,7 @@ def download_web_distribution(version: str):
print_info("Extracting web distribution...") print_info("Extracting web distribution...")
with tarfile.open(temp_file, "r:gz") as tar: with tarfile.open(temp_file, "r:gz") as tar:
members = [ members = [member for member in tar.getmembers() if member.name.startswith("web/")]
member for member in tar.getmembers() if member.name.startswith("web/")
]
tar.extractall(path=config.extension_uri, members=members) tar.extractall(path=config.extension_uri, members=members)
os.remove(temp_file) os.remove(temp_file)
@@ -120,21 +118,21 @@ def download_web_distribution(version: str):
def resolve_model_base_paths(): def resolve_model_base_paths():
folders = list(folder_paths.folder_names_and_paths.keys()) folders = list(folder_paths.folder_names_and_paths.keys())
config.model_base_paths = {} model_base_paths = {}
folder_black_list = ["configs", "custom_nodes"]
for folder in folders: for folder in folders:
if folder == "configs": if folder in folder_black_list:
continue
if folder == "custom_nodes":
continue continue
folders = folder_paths.get_folder_paths(folder) folders = folder_paths.get_folder_paths(folder)
config.model_base_paths[folder] = [normalize_path(f) for f in folders] model_base_paths[folder] = [normalize_path(f) for f in folders]
return model_base_paths
def get_full_path(model_type: str, path_index: int, filename: str): def get_full_path(model_type: str, path_index: int, filename: str):
""" """
Get the absolute path in the model type through string concatenation. Get the absolute path in the model type through string concatenation.
""" """
folders = config.model_base_paths.get(model_type, []) folders = resolve_model_base_paths().get(model_type, [])
if not path_index < len(folders): if not path_index < len(folders):
raise RuntimeError(f"PathIndex {path_index} is not in {model_type}") raise RuntimeError(f"PathIndex {path_index} is not in {model_type}")
base_path = folders[path_index] base_path = folders[path_index]
@@ -146,7 +144,7 @@ def get_valid_full_path(model_type: str, path_index: int, filename: str):
""" """
Like get_full_path but it will check whether the file is valid. Like get_full_path but it will check whether the file is valid.
""" """
folders = config.model_base_paths.get(model_type, []) folders = resolve_model_base_paths().get(model_type, [])
if not path_index < len(folders): if not path_index < len(folders):
raise RuntimeError(f"PathIndex {path_index} is not in {model_type}") raise RuntimeError(f"PathIndex {path_index} is not in {model_type}")
base_path = folders[path_index] base_path = folders[path_index]
@@ -154,9 +152,7 @@ def get_valid_full_path(model_type: str, path_index: int, filename: str):
if os.path.isfile(full_path): if os.path.isfile(full_path):
return full_path return full_path
elif os.path.islink(full_path): elif os.path.islink(full_path):
raise RuntimeError( raise RuntimeError(f"WARNING path {full_path} exists but doesn't link anywhere, skipping.")
f"WARNING path {full_path} exists but doesn't link anywhere, skipping."
)
def get_download_path(): def get_download_path():
@@ -166,11 +162,29 @@ def get_download_path():
return download_path return download_path
def recursive_search_files(directory: str): def recursive_search_files(directory: str, request):
files, folder_all = folder_paths.recursive_search( if not os.path.isdir(directory):
directory, excluded_dir_names=[".git"] return []
)
return [normalize_path(f) for f in files] excluded_dir_names = [".git"]
result = []
include_hidden_files = get_setting_value(request, "scan.include_hidden_files", False)
for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True):
subdirs[:] = [d for d in subdirs if d not in excluded_dir_names]
if not include_hidden_files:
subdirs[:] = [d for d in subdirs if not d.startswith(".")]
filenames[:] = [f for f in filenames if not f.startswith(".")]
for file_name in filenames:
try:
relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory)
result.append(relative_path)
except:
logging.warning(f"Warning: Unable to access {file_name}. Skipping this file.")
continue
return [normalize_path(f) for f in result]
def search_files(directory: str): def search_files(directory: str):

View File

@@ -1,7 +1,7 @@
[project] [project]
name = "comfyui-model-manager" name = "comfyui-model-manager"
description = "Manage models: browsing, download and delete." description = "Manage models: browsing, download and delete."
version = "2.1.1" version = "2.1.3"
license = "LICENSE" license = "LICENSE"
dependencies = ["markdownify"] dependencies = ["markdownify"]
@@ -13,3 +13,6 @@ Repository = "https://github.com/hayden-fr/ComfyUI-Model-Manager"
PublisherId = "hayden" PublisherId = "hayden"
DisplayName = "ComfyUI-Model-Manager" DisplayName = "ComfyUI-Model-Manager"
Icon = "" Icon = ""
[tool.black]
line-length = 160

View File

@@ -239,5 +239,12 @@ function useAddConfigSettings(store: import('hooks/store').StoreProvider) {
}) })
}, },
}) })
app.ui?.settings.addSetting({
id: 'ModelManager.Scan.IncludeHiddenFiles',
name: 'Include hidden files(start with .)',
defaultValue: false,
type: 'boolean',
})
}) })
} }