diff --git a/__init__.py b/__init__.py index 0023b2e..2ca3a62 100644 --- a/__init__.py +++ b/__init__.py @@ -130,7 +130,7 @@ async def read_models(request): Scan all models and read their information. """ try: - result = services.scan_models() + result = services.scan_models(request) return web.json_response({"success": True, "data": result}) except Exception as e: error_msg = f"Read models failed: {str(e)}" @@ -232,7 +232,7 @@ async def download_model_info(request): post = await utils.get_request_body(request) try: scan_mode = post.get("scanMode", "diff") - await services.download_model_info(scan_mode) + await services.download_model_info(scan_mode, request) return web.json_response({"success": True}) except Exception as e: error_msg = f"Download model info failed: {str(e)}" @@ -288,7 +288,7 @@ async def migrate_legacy_information(request): Migrate legacy information. """ try: - await services.migrate_legacy_information() + await services.migrate_legacy_information(request) return web.json_response({"success": True}) except Exception as e: error_msg = f"Migrate model info failed: {str(e)}" diff --git a/py/config.py b/py/config.py index bfb04c7..7f6a810 100644 --- a/py/config.py +++ b/py/config.py @@ -12,6 +12,9 @@ setting_key = { "download": { "max_task_count": "ModelManager.Download.MaxTaskCount", }, + "scan": { + "include_hidden_files": "ModelManager.Scan.IncludeHiddenFiles" + }, } user_agent = "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148" diff --git a/py/services.py b/py/services.py index cfc667f..e1e610d 100644 --- a/py/services.py +++ b/py/services.py @@ -8,14 +8,14 @@ from . import download from . 
import searcher -def scan_models(): +def scan_models(request): result = [] model_base_paths = config.model_base_paths for model_type in model_base_paths: folders, extensions = folder_paths.folder_names_and_paths[model_type] for path_index, base_path in enumerate(folders): - files = utils.recursive_search_files(base_path) + files = utils.recursive_search_files(base_path, request) models = folder_paths.filter_files_extensions(files, extensions) @@ -34,9 +34,7 @@ def scan_models(): image_state = os.stat(abs_image_path) image_timestamp = round(image_state.st_mtime_ns / 1000000) image_name = f"{image_name}?ts={image_timestamp}" - model_preview = ( - f"/model-manager/preview/{model_type}/{path_index}/{image_name}" - ) + model_preview = f"/model-manager/preview/{model_type}/{path_index}/{image_name}" model_info = { "fullname": fullname, @@ -138,14 +136,14 @@ def fetch_model_info(model_page: str): return result -async def download_model_info(scan_mode: str): +async def download_model_info(scan_mode: str, request): utils.print_info(f"Download model info for {scan_mode}") model_base_paths = config.model_base_paths for model_type in model_base_paths: folders, extensions = folder_paths.folder_names_and_paths[model_type] for path_index, base_path in enumerate(folders): - files = utils.recursive_search_files(base_path) + files = utils.recursive_search_files(base_path, request) models = folder_paths.filter_files_extensions(files, extensions) @@ -161,16 +159,8 @@ async def download_model_info(scan_mode: str): has_preview = os.path.isfile(abs_image_path) description_name = utils.get_model_description_name(abs_model_path) - abs_description_path = ( - utils.join_path(base_path, description_name) - if description_name - else None - ) - has_description = ( - os.path.isfile(abs_description_path) - if abs_description_path - else False - ) + abs_description_path = utils.join_path(base_path, description_name) if description_name else None + has_description = 
os.path.isfile(abs_description_path) if abs_description_path else False try: @@ -185,32 +175,24 @@ async def download_model_info(scan_mode: str): utils.print_debug(f"Calculate sha256 for {abs_model_path}") hash_value = utils.calculate_sha256(abs_model_path) utils.print_info(f"Searching model info by hash {hash_value}") - model_info = searcher.CivitaiModelSearcher().search_by_hash( - hash_value - ) + model_info = searcher.CivitaiModelSearcher().search_by_hash(hash_value) preview_url_list = model_info.get("preview", []) - preview_image_url = ( - preview_url_list[0] if preview_url_list else None - ) + preview_image_url = preview_url_list[0] if preview_url_list else None if preview_image_url: utils.print_debug(f"Save preview image to {abs_image_path}") - utils.save_model_preview_image( - abs_model_path, preview_image_url - ) + utils.save_model_preview_image(abs_model_path, preview_image_url) description = model_info.get("description", None) if description: utils.save_model_description(abs_model_path, description) except Exception as e: - utils.print_error( - f"Failed to download model info for {abs_model_path}: {e}" - ) + utils.print_error(f"Failed to download model info for {abs_model_path}: {e}") utils.print_debug("Completed scan model information.") -async def migrate_legacy_information(): +async def migrate_legacy_information(request): import json import yaml from PIL import Image @@ -222,7 +204,7 @@ async def migrate_legacy_information(): folders, extensions = folder_paths.folder_names_and_paths[model_type] for path_index, base_path in enumerate(folders): - files = utils.recursive_search_files(base_path) + files = utils.recursive_search_files(base_path, request) models = folder_paths.filter_files_extensions(files, extensions) diff --git a/py/utils.py b/py/utils.py index d9997f0..b70e691 100644 --- a/py/utils.py +++ b/py/utils.py @@ -103,9 +103,7 @@ def download_web_distribution(version: str): print_info("Extracting web distribution...") with 
tarfile.open(temp_file, "r:gz") as tar: - members = [ - member for member in tar.getmembers() if member.name.startswith("web/") - ] + members = [member for member in tar.getmembers() if member.name.startswith("web/")] tar.extractall(path=config.extension_uri, members=members) os.remove(temp_file) @@ -154,9 +152,7 @@ def get_valid_full_path(model_type: str, path_index: int, filename: str): if os.path.isfile(full_path): return full_path elif os.path.islink(full_path): - raise RuntimeError( - f"WARNING path {full_path} exists but doesn't link anywhere, skipping." - ) + raise RuntimeError(f"WARNING path {full_path} exists but doesn't link anywhere, skipping.") def get_download_path(): @@ -166,11 +162,29 @@ def get_download_path(): return download_path -def recursive_search_files(directory: str): - files, folder_all = folder_paths.recursive_search( - directory, excluded_dir_names=[".git"] - ) - return [normalize_path(f) for f in files] +def recursive_search_files(directory: str, request): + if not os.path.isdir(directory): + return [] + + excluded_dir_names = [".git"] + result = [] + include_hidden_files = get_setting_value(request, "scan.include_hidden_files", False) + + for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True): + subdirs[:] = [d for d in subdirs if d not in excluded_dir_names] + if not include_hidden_files: + subdirs[:] = [d for d in subdirs if not d.startswith(".")] + filenames[:] = [f for f in filenames if not f.startswith(".")] + + for file_name in filenames: + try: + relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory) + result.append(relative_path) + except OSError: + logging.warning(f"Warning: Unable to access {file_name}. 
Skipping this file.") + continue + + return [normalize_path(f) for f in result] def search_files(directory: str): diff --git a/src/hooks/config.ts b/src/hooks/config.ts index 0674d3d..9465afb 100644 --- a/src/hooks/config.ts +++ b/src/hooks/config.ts @@ -239,5 +239,12 @@ function useAddConfigSettings(store: import('hooks/store').StoreProvider) { }) }, }) + + app.ui?.settings.addSetting({ + id: 'ModelManager.Scan.IncludeHiddenFiles', + name: 'Include hidden files (start with .)', + defaultValue: false, + type: 'boolean', + }) }) }