Merge branch 'feature/persistent-hashing' into develop

This commit is contained in:
Manuel Schmid 2024-07-01 18:00:23 +02:00
commit fbb64533b9
No known key found for this signature in database
GPG Key ID: 32C4F7569B40B84B
5 changed files with 82 additions and 15 deletions

1
.gitignore vendored
View File

@ -10,6 +10,7 @@ __pycache__
*.partial
*.onnx
sorted_styles.json
hash_cache.txt
/input
/cache
/language/default.json

View File

@ -32,7 +32,10 @@ args_parser.parser.add_argument("--enable-auto-describe-image", action='store_tr
help="Enables automatic description of uov and enhance image when prompt is empty", default=False)
args_parser.parser.add_argument("--always-download-new-model", action='store_true',
help="Always download newer models ", default=False)
help="Always download newer models", default=False)
args_parser.parser.add_argument("--rebuild-hash-cache", action='store_true',
help="Generates missing model and LoRA hashes.", default=False)
args_parser.parser.set_defaults(
disable_cuda_malloc=True,

View File

@ -7,6 +7,7 @@ import args_manager
import tempfile
import modules.flags
import modules.sdxl_styles
from modules.hash_cache import load_cache_from_file, save_cache_to_file
from modules.model_loader import load_file_from_url
from modules.extra_utils import makedirs_with_log, get_files_from_folder, try_eval_env_var
@ -889,3 +890,20 @@ def downloading_sam_vit_h():
update_files()
load_cache_from_file()
if args_manager.args.rebuild_hash_cache:
from modules.hash_cache import sha256_from_cache
from modules.util import get_file_from_folder_list
print('[Cache] Rebuilding hash cache')
for filename in model_filenames:
filepath = get_file_from_folder_list(filename, paths_checkpoints)
sha256_from_cache(filepath)
for filename in lora_filenames:
filepath = get_file_from_folder_list(filename, paths_loras)
sha256_from_cache(filepath)
print('[Cache] Done')
# write cache to file again for sorting and cleanup of invalid cache entries
save_cache_to_file()

53
modules/hash_cache.py Normal file
View File

@ -0,0 +1,53 @@
import json
import os
from modules.util import sha256, HASH_SHA256_LENGTH
# On-disk cache file: one JSON object per line mapping a model/LoRA filepath
# to its sha256 hex digest (see load_cache_from_file / save_cache_to_file).
hash_cache_filename = 'hash_cache.txt'
# In-memory cache: filepath -> sha256 hex digest, filled lazily by
# sha256_from_cache and bulk-loaded from the cache file at startup.
hash_cache = {}
def sha256_from_cache(filepath):
    """Return the sha256 hex digest for *filepath*, using the in-memory cache.

    On a cache miss the hash is computed, stored in ``hash_cache``, and
    appended to the on-disk cache file immediately so it survives a crash.
    """
    global hash_cache
    if filepath in hash_cache:
        return hash_cache[filepath]
    digest = sha256(filepath)
    hash_cache[filepath] = digest
    # Persist just this one new entry (append mode) right away.
    save_cache_to_file(filepath, digest)
    return digest
def load_cache_from_file():
    """Populate ``hash_cache`` from the on-disk cache file, if it exists.

    The file is line-delimited JSON: each line is a single-key object mapping
    a filepath to its sha256 hex digest. An entry is skipped (with a log
    message) when the referenced file no longer exists, or when the stored
    hash is not a string of the full sha256 hex length.
    """
    global hash_cache
    try:
        if os.path.exists(hash_cache_filename):
            with open(hash_cache_filename, 'rt', encoding='utf-8') as fp:
                for line in fp:
                    entry = json.loads(line)
                    for filepath, hash_value in entry.items():
                        # BUGFIX: the original condition used
                        # `... or not isinstance(...) and len(...) != LEN`;
                        # `and` binds tighter than `or`, so a *string* hash of
                        # the wrong length was accepted. All three invalidity
                        # checks must be OR-ed.
                        if (not os.path.exists(filepath)
                                or not isinstance(hash_value, str)
                                or len(hash_value) != HASH_SHA256_LENGTH):
                            print(f'[Cache] Skipping invalid cache entry: {filepath}')
                            continue
                        hash_cache[filepath] = hash_value
    except Exception as e:
        # Best-effort load: a corrupt cache file must never block startup.
        print(f'[Cache] Loading failed: {e}')
def save_cache_to_file(filename=None, hash_value=None):
    """Write hash cache entries to the on-disk cache file as line-delimited JSON.

    When both *filename* and *hash_value* are given, append that single entry;
    otherwise rewrite the whole file from ``hash_cache``, sorted by filepath
    (which also drops any entries that were rejected at load time).
    """
    global hash_cache
    appending = filename is not None and hash_value is not None
    if appending:
        entries = [(filename, hash_value)]
        mode = 'at'
    else:
        entries = sorted(hash_cache.items())
        mode = 'wt'
    try:
        with open(hash_cache_filename, mode, encoding='utf-8') as fp:
            for path, digest in entries:
                fp.write(json.dumps({path: digest}))
                fp.write('\n')
    except Exception as e:
        # Best-effort persistence: failing to save must not abort the caller.
        print(f'[Cache] Saving failed: {e}')

View File

@ -9,16 +9,16 @@ from PIL import Image
import fooocus_version
import modules.config
import modules.sdxl_styles
from modules import hash_cache
from modules.flags import MetadataScheme, Performance, Steps
from modules.flags import SAMPLERS, CIVITAI_NO_KARRAS
from modules.util import quote, unquote, extract_styles_from_prompt, is_json, get_file_from_folder_list, sha256
from modules.hash_cache import sha256_from_cache
from modules.util import quote, unquote, extract_styles_from_prompt, is_json, get_file_from_folder_list
re_param_code = r'\s*(\w[\w \-/]+):\s*("(?:\\.|[^\\"])+"|[^,]*)(?:,|$)'
re_param = re.compile(re_param_code)
re_imagesize = re.compile(r"^(\d+)x(\d+)$")
hash_cache = {}
def load_parameter_button_click(raw_metadata: dict | str, is_generating: bool):
loaded_parameter_dict = raw_metadata
@ -230,14 +230,6 @@ def get_lora(key: str, fallback: str | None, source_dict: dict, results: list, p
results.append(1)
def get_sha256(filepath):
global hash_cache
if filepath not in hash_cache:
hash_cache[filepath] = sha256(filepath)
return hash_cache[filepath]
def parse_meta_from_preset(preset_content):
assert isinstance(preset_content, dict)
preset_prepared = {}
@ -304,18 +296,18 @@ class MetadataParser(ABC):
self.base_model_name = Path(base_model_name).stem
base_model_path = get_file_from_folder_list(base_model_name, modules.config.paths_checkpoints)
self.base_model_hash = get_sha256(base_model_path)
self.base_model_hash = sha256_from_cache(base_model_path)
if refiner_model_name not in ['', 'None']:
self.refiner_model_name = Path(refiner_model_name).stem
refiner_model_path = get_file_from_folder_list(refiner_model_name, modules.config.paths_checkpoints)
self.refiner_model_hash = get_sha256(refiner_model_path)
self.refiner_model_hash = sha256_from_cache(refiner_model_path)
self.loras = []
for (lora_name, lora_weight) in loras:
if lora_name != 'None':
lora_path = get_file_from_folder_list(lora_name, modules.config.paths_loras)
lora_hash = get_sha256(lora_path)
lora_hash = sha256_from_cache(lora_path)
self.loras.append((Path(lora_name).stem, lora_weight, lora_hash))
self.vae_name = Path(vae_name).stem