
Commit

Skip hidden model files (#64)
hayden-fr authored Nov 28, 2024
1 parent 8b6c6eb commit e891630
Showing 5 changed files with 51 additions and 45 deletions.
6 changes: 3 additions & 3 deletions __init__.py
@@ -130,7 +130,7 @@ async def read_models(request):
     Scan all models and read their information.
     """
     try:
-        result = services.scan_models()
+        result = services.scan_models(request)
         return web.json_response({"success": True, "data": result})
     except Exception as e:
         error_msg = f"Read models failed: {str(e)}"
@@ -232,7 +232,7 @@ async def download_model_info(request):
     post = await utils.get_request_body(request)
     try:
         scan_mode = post.get("scanMode", "diff")
-        await services.download_model_info(scan_mode)
+        await services.download_model_info(scan_mode, request)
         return web.json_response({"success": True})
     except Exception as e:
         error_msg = f"Download model info failed: {str(e)}"
@@ -288,7 +288,7 @@ async def migrate_legacy_information(request):
     Migrate legacy information.
     """
     try:
-        await services.migrate_legacy_information()
+        await services.migrate_legacy_information(request)
        return web.json_response({"success": True})
     except Exception as e:
         error_msg = f"Migrate model info failed: {str(e)}"
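Note: the handler-side change only threads the incoming aiohttp request into the service layer, so per-user settings (such as the new hidden-file toggle) can be resolved where the scan actually runs. A minimal, self-contained sketch of that pattern; scan_models_stub is a hypothetical stand-in for services.scan_models, not the extension's real module:

from aiohttp import web

# Hypothetical stand-in for services.scan_models: any service that needs the
# incoming request so it can resolve per-user settings while it scans.
def scan_models_stub(request: web.Request) -> list:
    return ["checkpoints/model.safetensors"]

async def read_models(request: web.Request) -> web.Response:
    try:
        result = scan_models_stub(request)  # the request travels with the call
        return web.json_response({"success": True, "data": result})
    except Exception as e:
        return web.json_response({"success": False, "error": f"Read models failed: {str(e)}"})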
3 changes: 3 additions & 0 deletions py/config.py
@@ -12,6 +12,9 @@
     "download": {
         "max_task_count": "ModelManager.Download.MaxTaskCount",
     },
+    "scan": {
+        "include_hidden_files": "ModelManager.Scan.IncludeHiddenFiles"
+    },
 }
 
 user_agent = "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148"
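The new entry pairs the dotted key used on the Python side ("scan.include_hidden_files", see recursive_search_files below) with the setting id registered on the frontend (ModelManager.Scan.IncludeHiddenFiles). A minimal sketch of how such a dotted key can be resolved through this nested mapping; the variable name setting_key and the helper resolve_setting_id are illustrative placeholders, not the extension's actual utils.get_setting_value:

# Illustrative only: the real lookup lives in the extension's utils.get_setting_value.
setting_key = {
    "download": {"max_task_count": "ModelManager.Download.MaxTaskCount"},
    "scan": {"include_hidden_files": "ModelManager.Scan.IncludeHiddenFiles"},
}

def resolve_setting_id(dotted_key: str) -> str:
    # Walk one mapping level per dot-separated segment.
    node = setting_key
    for part in dotted_key.split("."):
        node = node[part]
    return node

assert resolve_setting_id("scan.include_hidden_files") == "ModelManager.Scan.IncludeHiddenFiles"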
44 changes: 13 additions & 31 deletions py/services.py
@@ -8,14 +8,14 @@
 from . import searcher
 
 
-def scan_models():
+def scan_models(request):
     result = []
     model_base_paths = config.model_base_paths
     for model_type in model_base_paths:
 
         folders, extensions = folder_paths.folder_names_and_paths[model_type]
         for path_index, base_path in enumerate(folders):
-            files = utils.recursive_search_files(base_path)
+            files = utils.recursive_search_files(base_path, request)
 
             models = folder_paths.filter_files_extensions(files, extensions)
 
@@ -34,9 +34,7 @@ def scan_models():
                 image_state = os.stat(abs_image_path)
                 image_timestamp = round(image_state.st_mtime_ns / 1000000)
                 image_name = f"{image_name}?ts={image_timestamp}"
-                model_preview = (
-                    f"/model-manager/preview/{model_type}/{path_index}/{image_name}"
-                )
+                model_preview = f"/model-manager/preview/{model_type}/{path_index}/{image_name}"
 
                 model_info = {
                     "fullname": fullname,
@@ -138,14 +136,14 @@ def fetch_model_info(model_page: str):
     return result
 
 
-async def download_model_info(scan_mode: str):
+async def download_model_info(scan_mode: str, request):
     utils.print_info(f"Download model info for {scan_mode}")
     model_base_paths = config.model_base_paths
     for model_type in model_base_paths:
 
         folders, extensions = folder_paths.folder_names_and_paths[model_type]
         for path_index, base_path in enumerate(folders):
-            files = utils.recursive_search_files(base_path)
+            files = utils.recursive_search_files(base_path, request)
 
             models = folder_paths.filter_files_extensions(files, extensions)
 
@@ -161,16 +159,8 @@ async def download_model_info(scan_mode: str):
                 has_preview = os.path.isfile(abs_image_path)
 
                 description_name = utils.get_model_description_name(abs_model_path)
-                abs_description_path = (
-                    utils.join_path(base_path, description_name)
-                    if description_name
-                    else None
-                )
-                has_description = (
-                    os.path.isfile(abs_description_path)
-                    if abs_description_path
-                    else False
-                )
+                abs_description_path = utils.join_path(base_path, description_name) if description_name else None
+                has_description = os.path.isfile(abs_description_path) if abs_description_path else False
 
                 try:
 
@@ -185,32 +175,24 @@
                     utils.print_debug(f"Calculate sha256 for {abs_model_path}")
                     hash_value = utils.calculate_sha256(abs_model_path)
                     utils.print_info(f"Searching model info by hash {hash_value}")
-                    model_info = searcher.CivitaiModelSearcher().search_by_hash(
-                        hash_value
-                    )
+                    model_info = searcher.CivitaiModelSearcher().search_by_hash(hash_value)
 
                     preview_url_list = model_info.get("preview", [])
-                    preview_image_url = (
-                        preview_url_list[0] if preview_url_list else None
-                    )
+                    preview_image_url = preview_url_list[0] if preview_url_list else None
                     if preview_image_url:
                         utils.print_debug(f"Save preview image to {abs_image_path}")
-                        utils.save_model_preview_image(
-                            abs_model_path, preview_image_url
-                        )
+                        utils.save_model_preview_image(abs_model_path, preview_image_url)
 
                     description = model_info.get("description", None)
                     if description:
                         utils.save_model_description(abs_model_path, description)
                 except Exception as e:
-                    utils.print_error(
-                        f"Failed to download model info for {abs_model_path}: {e}"
-                    )
+                    utils.print_error(f"Failed to download model info for {abs_model_path}: {e}")
 
     utils.print_debug("Completed scan model information.")
 
 
-async def migrate_legacy_information():
+async def migrate_legacy_information(request):
     import json
     import yaml
     from PIL import Image
@@ -222,7 +204,7 @@ async def migrate_legacy_information():
 
         folders, extensions = folder_paths.folder_names_and_paths[model_type]
         for path_index, base_path in enumerate(folders):
-            files = utils.recursive_search_files(base_path)
+            files = utils.recursive_search_files(base_path, request)
 
             models = folder_paths.filter_files_extensions(files, extensions)
 
36 changes: 25 additions & 11 deletions py/utils.py
@@ -103,9 +103,7 @@ def download_web_distribution(version: str):
 
     print_info("Extracting web distribution...")
     with tarfile.open(temp_file, "r:gz") as tar:
-        members = [
-            member for member in tar.getmembers() if member.name.startswith("web/")
-        ]
+        members = [member for member in tar.getmembers() if member.name.startswith("web/")]
         tar.extractall(path=config.extension_uri, members=members)
 
     os.remove(temp_file)
@@ -154,9 +152,7 @@ def get_valid_full_path(model_type: str, path_index: int, filename: str):
     if os.path.isfile(full_path):
         return full_path
     elif os.path.islink(full_path):
-        raise RuntimeError(
-            f"WARNING path {full_path} exists but doesn't link anywhere, skipping."
-        )
+        raise RuntimeError(f"WARNING path {full_path} exists but doesn't link anywhere, skipping.")
 
 
 def get_download_path():
@@ -166,11 +162,29 @@ def get_download_path():
     return download_path
 
 
-def recursive_search_files(directory: str):
-    files, folder_all = folder_paths.recursive_search(
-        directory, excluded_dir_names=[".git"]
-    )
-    return [normalize_path(f) for f in files]
+def recursive_search_files(directory: str, request):
+    if not os.path.isdir(directory):
+        return []
+
+    excluded_dir_names = [".git"]
+    result = []
+    include_hidden_files = get_setting_value(request, "scan.include_hidden_files", False)
+
+    for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True):
+        subdirs[:] = [d for d in subdirs if d not in excluded_dir_names]
+        if not include_hidden_files:
+            subdirs[:] = [d for d in subdirs if not d.startswith(".")]
+            filenames[:] = [f for f in filenames if not f.startswith(".")]
+
+        for file_name in filenames:
+            try:
+                relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory)
+                result.append(relative_path)
+            except:
+                logging.warning(f"Warning: Unable to access {file_name}. Skipping this file.")
+                continue
+
+    return [normalize_path(f) for f in result]
 
 
 def search_files(directory: str):
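The heart of the rewrite is the in-place pruning of subdirs during a top-down os.walk, which stops the walk from descending into dot-directories at all instead of filtering results afterwards. A minimal standalone sketch of that behavior, independent of the extension's helpers (normalize_path, get_setting_value):

import os

def list_visible_files(directory: str, include_hidden: bool = False) -> list:
    """Collect file paths relative to `directory`, optionally skipping dot-files and dot-dirs."""
    found = []
    for dirpath, subdirs, filenames in os.walk(directory, topdown=True):
        if not include_hidden:
            # Mutating subdirs in place prunes the walk before it enters hidden folders.
            subdirs[:] = [d for d in subdirs if not d.startswith(".")]
            filenames = [f for f in filenames if not f.startswith(".")]
        for name in filenames:
            found.append(os.path.relpath(os.path.join(dirpath, name), directory))
    return found

# Example: with the default include_hidden=False, a ".cache" folder or a
# ".DS_Store" file under the models directory simply does not appear.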
7 changes: 7 additions & 0 deletions src/hooks/config.ts
@@ -239,5 +239,12 @@ function useAddConfigSettings(store: import('hooks/store').StoreProvider) {
         })
       },
     })
+
+    app.ui?.settings.addSetting({
+      id: 'ModelManager.Scan.IncludeHiddenFiles',
+      name: 'Include hidden files(start with .)',
+      defaultValue: false,
+      type: 'boolean',
+    })
   })
 }
