Merge branch 'develop'

commit 376d69cd91
@@ -34,8 +34,8 @@ args_parser.parser.add_argument("--enable-auto-describe-image", action='store_tr
 args_parser.parser.add_argument("--always-download-new-model", action='store_true',
                                 help="Always download newer models", default=False)
-args_parser.parser.add_argument("--rebuild-hash-cache", action='store_true',
-                                help="Generates missing model and LoRA hashes.", default=False)
+args_parser.parser.add_argument("--rebuild-hash-cache", help="Generates missing model and LoRA hashes.",
+                                type=int, nargs="?", metavar="CPU_NUM_THREADS", const=-1)
 
 args_parser.parser.set_defaults(
     disable_cuda_malloc=True,
 
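Aside on the reworked flag above, as a standalone sketch using only stdlib argparse (not repository code): `nargs="?"` makes the value optional and `const=-1` supplies a sentinel when the flag is passed bare, so the option now has three states instead of two.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--rebuild-hash-cache", help="Generates missing model and LoRA hashes.",
                    type=int, nargs="?", metavar="CPU_NUM_THREADS", const=-1)

assert parser.parse_args([]).rebuild_hash_cache is None                          # flag absent
assert parser.parse_args(["--rebuild-hash-cache"]).rebuild_hash_cache == -1      # bare flag -> const
assert parser.parse_args(["--rebuild-hash-cache", "8"]).rebuild_hash_cache == 8  # explicit count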
@@ -19,6 +19,8 @@ class GroundingDinoModel(Model):
         self.load_device = torch.device('cpu')
         self.offload_device = torch.device('cpu')
 
+    @torch.no_grad()
+    @torch.inference_mode()
     def predict_with_caption(
         self,
         image: np.ndarray,
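The two decorators added above relate to the version-counter fix noted in the changelog: `torch.inference_mode()` produces inference tensors that skip autograd's version-counter bookkeeping entirely, which plain `no_grad` does not. A minimal standalone illustration (not repository code):

import torch

with torch.no_grad():
    a = torch.ones(2)   # grad disabled, but the version counter is still tracked
with torch.inference_mode():
    b = torch.ones(2)   # inference tensor: no version-counter tracking at all

print(a.is_inference(), b.is_inference())  # False True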
@@ -1 +1 @@
-version = '2.6.0-rc1 (mashb1t)'
+version = '2.6.0-rc2 (mashb1t)'
@@ -7,7 +7,7 @@ import args_manager
 import tempfile
 import modules.flags
 import modules.sdxl_styles
-from modules.hash_cache import load_cache_from_file, save_cache_to_file
+from modules.hash_cache import init_cache
 
 from modules.model_loader import load_file_from_url
 from modules.extra_utils import makedirs_with_log, get_files_from_folder, try_eval_env_var
@@ -892,20 +892,4 @@ def downloading_sam_vit_h():
 
 
 update_files()
-load_cache_from_file()
-
-if args_manager.args.rebuild_hash_cache:
-    from modules.hash_cache import sha256_from_cache
-    from modules.util import get_file_from_folder_list
-
-    print('[Cache] Rebuilding hash cache')
-    for filename in model_filenames:
-        filepath = get_file_from_folder_list(filename, paths_checkpoints)
-        sha256_from_cache(filepath)
-    for filename in lora_filenames:
-        filepath = get_file_from_folder_list(filename, paths_loras)
-        sha256_from_cache(filepath)
-    print('[Cache] Done')
-
-    # write cache to file again for sorting and cleanup of invalid cache entries
-    save_cache_to_file()
+init_cache(model_filenames, paths_checkpoints, lora_filenames, paths_loras)
@@ -1,6 +1,10 @@
 import json
 import os
+from concurrent.futures import ThreadPoolExecutor
+from multiprocessing import cpu_count
 
+import args_manager
+from modules.util import get_file_from_folder_list
 from modules.util import sha256, HASH_SHA256_LENGTH
 
 hash_cache_filename = 'hash_cache.txt'
@@ -10,7 +14,9 @@ hash_cache = {}
 def sha256_from_cache(filepath):
     global hash_cache
     if filepath not in hash_cache:
+        print(f"[Cache] Calculating sha256 for {filepath}")
         hash_value = sha256(filepath)
+        print(f"[Cache] sha256 for {filepath}: {hash_value}")
         hash_cache[filepath] = hash_value
         save_cache_to_file(filepath, hash_value)
 
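Design note on `sha256_from_cache` above: once `rebuild_cache` (added below) calls it from multiple threads, the `if filepath not in hash_cache` check-then-act is not atomic, so two workers can hash the same file; the results are identical, so the cache stays consistent and only duplicate work is at stake. A purely illustrative lock-based variant (hypothetical, not in the diff):

import threading

_cache_lock = threading.Lock()  # hypothetical; not part of the codebase

def sha256_from_cache_locked(filepath, hash_cache, sha256):
    with _cache_lock:
        if filepath in hash_cache:
            return hash_cache[filepath]
    value = sha256(filepath)  # hash outside the lock so workers stay parallel
    with _cache_lock:
        hash_cache.setdefault(filepath, value)
        return hash_cache[filepath]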
@@ -51,3 +57,28 @@ def save_cache_to_file(filename=None, hash_value=None):
             fp.write('\n')
     except Exception as e:
         print(f'[Cache] Saving failed: {e}')
+
+
+def init_cache(model_filenames, paths_checkpoints, lora_filenames, paths_loras):
+    load_cache_from_file()
+
+    if args_manager.args.rebuild_hash_cache:
+        max_workers = args_manager.args.rebuild_hash_cache if args_manager.args.rebuild_hash_cache > 0 else cpu_count()
+        rebuild_cache(lora_filenames, model_filenames, paths_checkpoints, paths_loras, max_workers)
+
+    # write cache to file again for sorting and cleanup of invalid cache entries
+    save_cache_to_file()
+
+
+def rebuild_cache(lora_filenames, model_filenames, paths_checkpoints, paths_loras, max_workers=cpu_count()):
+    def thread(filename, paths):
+        filepath = get_file_from_folder_list(filename, paths)
+        sha256_from_cache(filepath)
+
+    print('[Cache] Rebuilding hash cache')
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        for model_filename in model_filenames:
+            executor.submit(thread, model_filename, paths_checkpoints)
+        for lora_filename in lora_filenames:
+            executor.submit(thread, lora_filename, paths_loras)
+    print('[Cache] Done')
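A runnable standalone sketch of the pattern `rebuild_cache` uses above, with throwaway temp files standing in for checkpoints: leaving the `with ThreadPoolExecutor(...)` block implicitly calls `shutdown(wait=True)`, so every submitted hash job has finished before the final print. Note that `rebuild_cache` discards its futures, so an exception raised inside `thread()` is dropped silently; calling `future.result()`, as below, is how a caller would surface it.

import hashlib
import os
import tempfile
from concurrent.futures import ThreadPoolExecutor

def hash_file(path):
    with open(path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest()

paths = []
for data in (b"model a", b"model b"):  # throwaway stand-ins for model files
    fd, p = tempfile.mkstemp()
    os.write(fd, data)
    os.close(fd)
    paths.append(p)

with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(hash_file, p) for p in paths]
# shutdown(wait=True) has run: all jobs are done, .result() re-raises any error
print([f.result()[:12] for f in futures])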
@@ -176,13 +176,11 @@ def generate_temp_filename(folder='./outputs/', extension='png'):
 
 
 def sha256(filename, use_addnet_hash=False, length=HASH_SHA256_LENGTH):
-    print(f"Calculating sha256 for {filename}: ", end='')
     if use_addnet_hash:
         with open(filename, "rb") as file:
             sha256_value = addnet_hash_safetensors(file)
     else:
         sha256_value = calculate_sha256(filename)
-    print(f"{sha256_value}")
 
     return sha256_value[:length] if length is not None else sha256_value
 
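`calculate_sha256` and `addnet_hash_safetensors` are not part of this diff; for context, a typical chunked implementation of the former looks like the sketch below (an assumption, not the repository's code). Chunked reads keep memory flat for multi-gigabyte checkpoints, and the `sha256` wrapper above truncates the digest to `HASH_SHA256_LENGTH`.

import hashlib

def calculate_sha256_sketch(filename, blksize=1024 * 1024):
    # assumed implementation; reads 1 MiB at a time to bound memory use
    hash_sha256 = hashlib.sha256()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(blksize), b""):
            hash_sha256.update(chunk)
    return hash_sha256.hexdigest()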
@@ -1,3 +1,9 @@
+# [2.6.0-rc2](https://github.com/mashb1t/Fooocus/releases/tag/v2.6.0-rc2)
+
+* Add hash generation multi-threading support, change `--rebuild-hash-cache` from bool to int (number of CPU cores)
+* Fix inference tensor version counter tracking issue for GroundingDINO after using Enhance (see [discussion](https://github.com/lllyasviel/Fooocus/discussions/3213))
+
+
 # [2.6.0-rc1](https://github.com/mashb1t/Fooocus/releases/tag/v2.6.0-rc1)
 
 * Update default models to latest versions