feat: add model hash support for a1111
This commit is contained in:
parent
191f8148e4
commit
f7489cc9ef
|
|
@ -14,6 +14,7 @@ async_tasks = []
|
||||||
def worker():
|
def worker():
|
||||||
global async_tasks
|
global async_tasks
|
||||||
|
|
||||||
|
import os
|
||||||
import traceback
|
import traceback
|
||||||
import math
|
import math
|
||||||
import json
|
import json
|
||||||
|
|
@ -42,7 +43,7 @@ def worker():
|
||||||
from modules.private_logger import log
|
from modules.private_logger import log
|
||||||
from extras.expansion import safe_str
|
from extras.expansion import safe_str
|
||||||
from modules.util import remove_empty_str, HWC3, resize_image, \
|
from modules.util import remove_empty_str, HWC3, resize_image, \
|
||||||
get_image_shape_ceil, set_image_shape_ceil, get_shape_ceil, resample_image, erode_or_dilate
|
get_image_shape_ceil, set_image_shape_ceil, get_shape_ceil, resample_image, erode_or_dilate, calculate_sha256, quote
|
||||||
from modules.upscaler import perform_upscale
|
from modules.upscaler import perform_upscale
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
@ -201,6 +202,17 @@ def worker():
|
||||||
modules.patch.adm_scaler_end = advanced_parameters.adm_scaler_end = 0.0
|
modules.patch.adm_scaler_end = advanced_parameters.adm_scaler_end = 0.0
|
||||||
steps = 8
|
steps = 8
|
||||||
|
|
||||||
|
base_model_path = os.path.join(modules.config.path_checkpoints, base_model_name)
|
||||||
|
base_model_hash = calculate_sha256(base_model_path)[0:10]
|
||||||
|
|
||||||
|
lora_hashes = []
|
||||||
|
for (n, w) in loras:
|
||||||
|
if n != 'None':
|
||||||
|
lora_path = os.path.join(modules.config.path_loras, n)
|
||||||
|
lora_hashes.append(f'{n.split('.')[0]}: {calculate_sha256(lora_path)[0:10]}')
|
||||||
|
lora_hashes_string = ", ".join(lora_hashes)
|
||||||
|
print(lora_hashes_string)
|
||||||
|
|
||||||
modules.patch.adaptive_cfg = advanced_parameters.adaptive_cfg
|
modules.patch.adaptive_cfg = advanced_parameters.adaptive_cfg
|
||||||
print(f'[Parameters] Adaptive CFG = {modules.patch.adaptive_cfg}')
|
print(f'[Parameters] Adaptive CFG = {modules.patch.adaptive_cfg}')
|
||||||
|
|
||||||
|
|
@ -854,16 +866,17 @@ def worker():
|
||||||
"CFG scale": cfg_scale,
|
"CFG scale": cfg_scale,
|
||||||
"Seed": task['task_seed'],
|
"Seed": task['task_seed'],
|
||||||
"Size": f"{width}x{height}",
|
"Size": f"{width}x{height}",
|
||||||
#"Model hash": p.sd_model_hash if opts.add_model_hash_to_info else None,
|
"Model hash": base_model_hash,
|
||||||
"Model": base_model_name,
|
"Model": base_model_name.split('.')[0],
|
||||||
|
"Lora hashes": lora_hashes_string,
|
||||||
"Denoising strength": denoising_strength,
|
"Denoising strength": denoising_strength,
|
||||||
"Version": f'Fooocus v{fooocus_version.version}',
|
"Version": f'Fooocus v{fooocus_version.version}',
|
||||||
"User": 'mashb1t',
|
"User": 'mashb1t'
|
||||||
}
|
}
|
||||||
|
|
||||||
generation_params_text = ", ".join([k if k == v else f'{k}: {v}' for k, v in generation_params.items() if v is not None])
|
generation_params_text = ", ".join([k if k == v else f'{k}: {quote(v)}' for k, v in generation_params.items() if v is not None])
|
||||||
negative_prompt_text = f"\nNegative prompt: {raw_negative_prompt}" if raw_negative_prompt else ""
|
negative_prompt_text = f"\nNegative prompt: {raw_negative_prompt}" if raw_negative_prompt else ""
|
||||||
metadata_string = f"{raw_prompt}{raw_negative_prompt}\n{generation_params_text}".strip()
|
metadata_string = f"{raw_prompt}{negative_prompt_text}\n{generation_params_text}".strip()
|
||||||
|
|
||||||
for x in imgs:
|
for x in imgs:
|
||||||
d = [
|
d = [
|
||||||
|
|
|
||||||
|
|
@ -4,8 +4,10 @@ import random
|
||||||
import math
|
import math
|
||||||
import os
|
import os
|
||||||
import cv2
|
import cv2
|
||||||
|
import json
|
||||||
|
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
|
from hashlib import sha256
|
||||||
|
|
||||||
|
|
||||||
LANCZOS = (Image.Resampling.LANCZOS if hasattr(Image, 'Resampling') else Image.LANCZOS)
|
LANCZOS = (Image.Resampling.LANCZOS if hasattr(Image, 'Resampling') else Image.LANCZOS)
|
||||||
|
|
@ -175,3 +177,19 @@ def get_files_from_folder(folder_path, exensions=None, name_filter=None):
|
||||||
filenames.append(path)
|
filenames.append(path)
|
||||||
|
|
||||||
return sorted(filenames, key=lambda x: -1 if os.sep in x else 1)
|
return sorted(filenames, key=lambda x: -1 if os.sep in x else 1)
|
||||||
|
|
||||||
|
def calculate_sha256(filename):
    """Return the hex SHA-256 digest of the file at *filename*.

    The file is streamed in fixed-size chunks so arbitrarily large
    checkpoint/LoRA files can be hashed without loading them into memory.
    """
    digest = sha256()
    chunk_size = 1024 * 1024  # 1 MiB reads keep memory bounded for multi-GB models

    with open(filename, "rb") as stream:
        # read() returns b"" at EOF, which is falsy and ends the loop
        while data := stream.read(chunk_size):
            digest.update(data)

    return digest.hexdigest()
|
||||||
|
|
||||||
|
def quote(text):
    """JSON-quote *text* if it would break A1111 "key: value" metadata.

    Values containing a comma, newline, or colon are returned as a JSON
    string literal (escaped, non-ASCII preserved); any other value is
    returned unchanged, keeping its original type.
    """
    rendered = str(text)
    # comma separates parameters, colon separates key/value, newline breaks lines
    if not any(ch in rendered for ch in (',', '\n', ':')):
        return text

    return json.dumps(text, ensure_ascii=False)
|
||||||
Loading…
Reference in New Issue