Skip hidden model files (#64)
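This change adds a `scan.include_hidden_files` setting and threads the incoming `request` through `scan_models`, `download_model_info`, and `migrate_legacy_information`, so that `utils.recursive_search_files` can skip dot-prefixed ("hidden") files and directories unless the setting is enabled. The same commit also collapses several multi-line expressions onto single lines.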
@@ -12,6 +12,9 @@ setting_key = {
     "download": {
         "max_task_count": "ModelManager.Download.MaxTaskCount",
     },
+    "scan": {
+        "include_hidden_files": "ModelManager.Scan.IncludeHiddenFiles"
+    },
 }
 
 user_agent = "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148"
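The new `"scan"` entry maps the dotted key `scan.include_hidden_files` to the stored setting id `ModelManager.Scan.IncludeHiddenFiles`. The actual lookup happens behind `get_setting_value` (used in the `py/utils.py` hunk below), whose implementation is not part of this diff; the resolver below is a hypothetical sketch of how such a nested key table can be walked:

```python
# Hypothetical sketch: the real get_setting_value is not shown in this diff.
setting_key = {
    "download": {"max_task_count": "ModelManager.Download.MaxTaskCount"},
    "scan": {"include_hidden_files": "ModelManager.Scan.IncludeHiddenFiles"},
}

def resolve_setting_id(key_path: str) -> str:
    """Walk the nested setting_key table with a dotted path."""
    node = setting_key
    for part in key_path.split("."):
        node = node[part]
    return node

assert resolve_setting_id("scan.include_hidden_files") == "ModelManager.Scan.IncludeHiddenFiles"
```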
@@ -8,14 +8,14 @@ from . import download
 from . import searcher
 
 
-def scan_models():
+def scan_models(request):
     result = []
     model_base_paths = config.model_base_paths
     for model_type in model_base_paths:
 
         folders, extensions = folder_paths.folder_names_and_paths[model_type]
         for path_index, base_path in enumerate(folders):
-            files = utils.recursive_search_files(base_path)
+            files = utils.recursive_search_files(base_path, request)
 
             models = folder_paths.filter_files_extensions(files, extensions)
 
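How `scan_models(request)` receives its `request` argument is outside this diff. A minimal sketch of the calling side, assuming an aiohttp-style server (the route path and handler name are assumptions for illustration):

```python
# Hypothetical caller: only scan_models(request) comes from the diff;
# the route, handler name, and framework wiring are assumptions.
from aiohttp import web

routes = web.RouteTableDef()

@routes.get("/model-manager/models")
async def list_models(request: web.Request) -> web.Response:
    # The request is threaded down so recursive_search_files can read
    # the "scan.include_hidden_files" setting for this request.
    result = scan_models(request)  # scan_models as defined in this diff
    return web.json_response(result)
```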
@@ -34,9 +34,7 @@ def scan_models():
             image_state = os.stat(abs_image_path)
             image_timestamp = round(image_state.st_mtime_ns / 1000000)
             image_name = f"{image_name}?ts={image_timestamp}"
-            model_preview = (
-                f"/model-manager/preview/{model_type}/{path_index}/{image_name}"
-            )
+            model_preview = f"/model-manager/preview/{model_type}/{path_index}/{image_name}"
 
             model_info = {
                 "fullname": fullname,
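The `?ts=` suffix here is a cache-buster: `st_mtime_ns` is in nanoseconds, so `round(st_mtime_ns / 1000000)` yields milliseconds, and the preview URL changes whenever the image file is modified. A quick self-contained illustration:

```python
import os

# st_mtime_ns is nanoseconds; dividing by 1_000_000 gives milliseconds,
# so the query string changes whenever the file's mtime changes.
state = os.stat(__file__)
ts_ms = round(state.st_mtime_ns / 1000000)
print(f"preview.png?ts={ts_ms}")
```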
@@ -138,14 +136,14 @@ def fetch_model_info(model_page: str):
     return result
 
 
-async def download_model_info(scan_mode: str):
+async def download_model_info(scan_mode: str, request):
     utils.print_info(f"Download model info for {scan_mode}")
     model_base_paths = config.model_base_paths
     for model_type in model_base_paths:
 
         folders, extensions = folder_paths.folder_names_and_paths[model_type]
         for path_index, base_path in enumerate(folders):
-            files = utils.recursive_search_files(base_path)
+            files = utils.recursive_search_files(base_path, request)
 
             models = folder_paths.filter_files_extensions(files, extensions)
 
@@ -161,16 +159,8 @@ async def download_model_info(scan_mode: str):
             has_preview = os.path.isfile(abs_image_path)
 
             description_name = utils.get_model_description_name(abs_model_path)
-            abs_description_path = (
-                utils.join_path(base_path, description_name)
-                if description_name
-                else None
-            )
-            has_description = (
-                os.path.isfile(abs_description_path)
-                if abs_description_path
-                else False
-            )
+            abs_description_path = utils.join_path(base_path, description_name) if description_name else None
+            has_description = os.path.isfile(abs_description_path) if abs_description_path else False
 
             try:
@@ -185,32 +175,24 @@ async def download_model_info(scan_mode: str):
                 utils.print_debug(f"Calculate sha256 for {abs_model_path}")
                 hash_value = utils.calculate_sha256(abs_model_path)
                 utils.print_info(f"Searching model info by hash {hash_value}")
-                model_info = searcher.CivitaiModelSearcher().search_by_hash(
-                    hash_value
-                )
+                model_info = searcher.CivitaiModelSearcher().search_by_hash(hash_value)
 
                 preview_url_list = model_info.get("preview", [])
-                preview_image_url = (
-                    preview_url_list[0] if preview_url_list else None
-                )
+                preview_image_url = preview_url_list[0] if preview_url_list else None
                 if preview_image_url:
                     utils.print_debug(f"Save preview image to {abs_image_path}")
-                    utils.save_model_preview_image(
-                        abs_model_path, preview_image_url
-                    )
+                    utils.save_model_preview_image(abs_model_path, preview_image_url)
 
                 description = model_info.get("description", None)
                 if description:
                     utils.save_model_description(abs_model_path, description)
             except Exception as e:
-                utils.print_error(
-                    f"Failed to download model info for {abs_model_path}: {e}"
-                )
+                utils.print_error(f"Failed to download model info for {abs_model_path}: {e}")
 
     utils.print_debug("Completed scan model information.")
 
 
-async def migrate_legacy_information():
+async def migrate_legacy_information(request):
     import json
     import yaml
     from PIL import Image
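The hunk above collapses the wrapped `search_by_hash` call onto one line; `CivitaiModelSearcher` itself is not shown in this diff. For orientation, a hedged sketch of the general shape of a by-hash lookup against Civitai's public API (the endpoint and response handling here are assumptions, not taken from this repository):

```python
# Hedged sketch: stands in for CivitaiModelSearcher.search_by_hash, which
# this diff does not show. Endpoint assumed from Civitai's public v1 API.
import requests

def search_by_hash(sha256_hash: str) -> dict:
    url = f"https://civitai.com/api/v1/model-versions/by-hash/{sha256_hash}"
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    return response.json()  # model-version metadata, e.g. preview image URLs
```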
@@ -222,7 +204,7 @@ async def migrate_legacy_information():
 
         folders, extensions = folder_paths.folder_names_and_paths[model_type]
         for path_index, base_path in enumerate(folders):
-            files = utils.recursive_search_files(base_path)
+            files = utils.recursive_search_files(base_path, request)
 
             models = folder_paths.filter_files_extensions(files, extensions)
 
py/utils.py (36 changed lines)
@@ -103,9 +103,7 @@ def download_web_distribution(version: str):
 
     print_info("Extracting web distribution...")
     with tarfile.open(temp_file, "r:gz") as tar:
-        members = [
-            member for member in tar.getmembers() if member.name.startswith("web/")
-        ]
+        members = [member for member in tar.getmembers() if member.name.startswith("web/")]
         tar.extractall(path=config.extension_uri, members=members)
 
     os.remove(temp_file)
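As an aside, pre-filtering `tar.getmembers()` before `extractall` also limits what the archive can write to disk. A sketch of the same pattern with an explicit path-traversal guard (the guard is added for illustration; it is not part of this commit):

```python
import tarfile

def extract_web_members(archive_path: str, dest: str) -> None:
    # Extract only members under web/, rejecting parent-relative names.
    with tarfile.open(archive_path, "r:gz") as tar:
        members = [
            m for m in tar.getmembers()
            if m.name.startswith("web/") and ".." not in m.name.split("/")
        ]
        tar.extractall(path=dest, members=members)
```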
@@ -154,9 +152,7 @@ def get_valid_full_path(model_type: str, path_index: int, filename: str):
     if os.path.isfile(full_path):
         return full_path
     elif os.path.islink(full_path):
-        raise RuntimeError(
-            f"WARNING path {full_path} exists but doesn't link anywhere, skipping."
-        )
+        raise RuntimeError(f"WARNING path {full_path} exists but doesn't link anywhere, skipping.")
 
 
 def get_download_path():
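The `elif` branch only fires for dangling symlinks: `os.path.isfile` follows links and returns False when the target is missing, while `os.path.islink` still reports True. A quick demonstration (POSIX; the paths are throwaway temporaries):

```python
import os
import tempfile

root = tempfile.mkdtemp()
link = os.path.join(root, "model.safetensors")
os.symlink(os.path.join(root, "missing-target"), link)  # dangling link

print(os.path.isfile(link))  # False: follows the link, target is absent
print(os.path.islink(link))  # True: the link entry itself exists
```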
@@ -166,11 +162,29 @@ def get_download_path():
     return download_path
 
 
-def recursive_search_files(directory: str):
-    files, folder_all = folder_paths.recursive_search(
-        directory, excluded_dir_names=[".git"]
-    )
-    return [normalize_path(f) for f in files]
+def recursive_search_files(directory: str, request):
+    if not os.path.isdir(directory):
+        return []
+
+    excluded_dir_names = [".git"]
+    result = []
+    include_hidden_files = get_setting_value(request, "scan.include_hidden_files", False)
+
+    for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True):
+        subdirs[:] = [d for d in subdirs if d not in excluded_dir_names]
+        if not include_hidden_files:
+            subdirs[:] = [d for d in subdirs if not d.startswith(".")]
+            filenames[:] = [f for f in filenames if not f.startswith(".")]
+
+        for file_name in filenames:
+            try:
+                relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory)
+                result.append(relative_path)
+            except:
+                logging.warning(f"Warning: Unable to access {file_name}. Skipping this file.")
+                continue
+
+    return [normalize_path(f) for f in result]
 
 
 def search_files(directory: str):
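The in-place `subdirs[:]` assignments are what make the pruning work: with `topdown=True`, `os.walk` consults the mutated list before descending, so `.git` and (when hidden files are excluded) dot-directories are never entered at all. A small self-contained demonstration:

```python
import os
import tempfile

# Mutating subdirs in place prunes os.walk's descent under topdown=True.
root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, ".hidden", "nested"))
os.makedirs(os.path.join(root, "visible"))

for dirpath, subdirs, filenames in os.walk(root, topdown=True):
    subdirs[:] = [d for d in subdirs if not d.startswith(".")]
    print(os.path.relpath(dirpath, root))  # prints "." and "visible", never ".hidden"
```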