From fb94394b10807b4ae23d21fe69e703dc85abae94 Mon Sep 17 00:00:00 2001
From: Manuel Schmid <9307310+mashb1t@users.noreply.github.com>
Date: Sun, 3 Mar 2024 18:46:26 +0100
Subject: [PATCH 1/7] fix: add fallback value for default_max_lora_number when
 default_loras is empty (#2430)

---
 modules/config.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/config.py b/modules/config.py
index 09c8fd7c..a68bd218 100644
--- a/modules/config.py
+++ b/modules/config.py
@@ -264,7 +264,7 @@ default_loras = get_config_item_or_set_default(
 )
 default_max_lora_number = get_config_item_or_set_default(
     key='default_max_lora_number',
-    default_value=len(default_loras),
+    default_value=len(default_loras) if isinstance(default_loras, list) and len(default_loras) > 0 else 5,
     validator=lambda x: isinstance(x, int) and x >= 1
 )
 default_cfg_scale = get_config_item_or_set_default(
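For context on the fix above (a standalone sketch, not part of the patch; the get_config_item_or_set_default helper is bypassed here): with an empty default_loras, the old default of len(default_loras) evaluates to 0, which the x >= 1 validator rejects, so the expression now falls back to 5:

    # Minimal sketch of the new default: an empty LoRA list no longer yields
    # an invalid default_max_lora_number of 0.
    default_loras = []  # e.g. a user config that preconfigures no LoRAs

    default_max_lora_number = (
        len(default_loras)
        if isinstance(default_loras, list) and len(default_loras) > 0
        else 5  # fallback value introduced by this patch
    )

    assert isinstance(default_max_lora_number, int) and default_max_lora_number >= 1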
From c3fd57acb9dc29383a81542b07ae8c2ac863a1ea Mon Sep 17 00:00:00 2001
From: Manuel Schmid <9307310+mashb1t@users.noreply.github.com>
Date: Sun, 3 Mar 2024 19:34:38 +0100
Subject: [PATCH 2/7] feat: add metadata flag and steps override to history
 log (#2425)

* feat: add metadata hint to history log

* feat: add actual metadata_scheme to log instead of only boolean

* feat: add steps to log if they were overridden

* fix: pass copy of metadata to prevent LoRA file extension removal in
  history log, caused by passing a reference to the meta_parser fooocus
  scheme

---
 modules/async_worker.py   | 36 ++++++++++++++++++++----------------
 modules/private_logger.py |  4 ++--
 2 files changed, 22 insertions(+), 18 deletions(-)

diff --git a/modules/async_worker.py b/modules/async_worker.py
index fd785f07..a8661f4d 100644
--- a/modules/async_worker.py
+++ b/modules/async_worker.py
@@ -830,17 +830,21 @@ def worker():
                      ('Negative Prompt', 'negative_prompt', task['log_negative_prompt']),
                      ('Fooocus V2 Expansion', 'prompt_expansion', task['expansion']),
                      ('Styles', 'styles', str(raw_style_selections)),
-                     ('Performance', 'performance', performance_selection.value),
-                     ('Resolution', 'resolution', str((width, height))),
-                     ('Guidance Scale', 'guidance_scale', guidance_scale),
-                     ('Sharpness', 'sharpness', sharpness),
-                     ('ADM Guidance', 'adm_guidance', str((
-                         modules.patch.patch_settings[pid].positive_adm_scale,
-                         modules.patch.patch_settings[pid].negative_adm_scale,
-                         modules.patch.patch_settings[pid].adm_scaler_end))),
-                     ('Base Model', 'base_model', base_model_name),
-                     ('Refiner Model', 'refiner_model', refiner_model_name),
-                     ('Refiner Switch', 'refiner_switch', refiner_switch)]
+                     ('Performance', 'performance', performance_selection.value)]
+
+                if performance_selection.steps() != steps:
+                    d.append(('Steps', 'steps', steps))
+
+                d += [('Resolution', 'resolution', str((width, height))),
+                      ('Guidance Scale', 'guidance_scale', guidance_scale),
+                      ('Sharpness', 'sharpness', sharpness),
+                      ('ADM Guidance', 'adm_guidance', str((
+                          modules.patch.patch_settings[pid].positive_adm_scale,
+                          modules.patch.patch_settings[pid].negative_adm_scale,
+                          modules.patch.patch_settings[pid].adm_scaler_end))),
+                      ('Base Model', 'base_model', base_model_name),
+                      ('Refiner Model', 'refiner_model', refiner_model_name),
+                      ('Refiner Switch', 'refiner_switch', refiner_switch)]
 
                 if refiner_model_name != 'None':
                     if overwrite_switch > 0:
@@ -857,17 +861,17 @@ def worker():
                 if freeu_enabled:
                     d.append(('FreeU', 'freeu', str((freeu_b1, freeu_b2, freeu_s1, freeu_s2))))
 
+                for li, (n, w) in enumerate(loras):
+                    if n != 'None':
+                        d.append((f'LoRA {li + 1}', f'lora_combined_{li + 1}', f'{n} : {w}'))
+
                 metadata_parser = None
                 if save_metadata_to_images:
                     metadata_parser = modules.meta_parser.get_metadata_parser(metadata_scheme)
                     metadata_parser.set_data(task['log_positive_prompt'], task['positive'],
                                              task['log_negative_prompt'], task['negative'],
                                              steps, base_model_name, refiner_model_name, loras)
-
-                for li, (n, w) in enumerate(loras):
-                    if n != 'None':
-                        d.append((f'LoRA {li + 1}', f'lora_combined_{li + 1}', f'{n} : {w}'))
-
+                d.append(('Metadata Scheme', 'metadata_scheme', metadata_scheme.value if save_metadata_to_images else save_metadata_to_images))
                 d.append(('Version', 'version', 'Fooocus v' + fooocus_version.version))
                 img_paths.append(log(x, d, metadata_parser, output_format))
 
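The steps-override logic added above records a 'Steps' row only when the effective step count differs from the performance preset's default; the private_logger.py half of this patch follows below. A standalone sketch of that condition (the Performance enum and its per-preset step counts are stand-ins assumed for illustration, not the modules.flags implementation):

    # 'Steps' appears in the history log only when the preset default was overridden.
    from enum import Enum

    class Performance(Enum):  # stand-in for Fooocus's Performance flag
        SPEED = 'Speed'
        QUALITY = 'Quality'

        def steps(self) -> int:
            return {'Speed': 30, 'Quality': 60}[self.value]  # assumed preset defaults

    performance_selection = Performance.SPEED
    steps = 25  # user override, e.g. set via the developer debug options

    d = [('Performance', 'performance', performance_selection.value)]
    if performance_selection.steps() != steps:
        d.append(('Steps', 'steps', steps))

    print(d)  # [('Performance', 'performance', 'Speed'), ('Steps', 'steps', 25)]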
diff --git a/modules/private_logger.py b/modules/private_logger.py
index 8fa5f73c..01e570a7 100644
--- a/modules/private_logger.py
+++ b/modules/private_logger.py
@@ -26,7 +26,7 @@ def log(img, metadata, metadata_parser: MetadataParser | None = None, output_for
     date_string, local_temp_filename, only_name = generate_temp_filename(folder=path_outputs, extension=output_format)
     os.makedirs(os.path.dirname(local_temp_filename), exist_ok=True)
 
-    parsed_parameters = metadata_parser.parse_string(metadata) if metadata_parser is not None else ''
+    parsed_parameters = metadata_parser.parse_string(metadata.copy()) if metadata_parser is not None else ''
     image = Image.fromarray(img)
 
     if output_format == 'png':
@@ -90,7 +90,7 @@ def log(img, metadata, metadata_parser: MetadataParser | None = None, output_for
     """
     )
 
-    begin_part = f"<!DOCTYPE html><html><head><title>Fooocus Log {date_string}</title>{css_styles}</head><body>{js}<p>Fooocus Log {date_string} (private)</p>\n<p>All images are clean, without any hidden data/meta, and safe to share with others.</p><!--fooocus-log-split-->\n\n"
+    begin_part = f"<!DOCTYPE html><html><head><title>Fooocus Log {date_string}</title>{css_styles}</head><body>{js}<p>Fooocus Log {date_string} (private)</p>\n<p>Metadata is embedded if enabled in the config or developer debug mode. You can find the information for each image in line <b>Metadata Scheme</b>.</p><!--fooocus-log-split-->\n\n"
     end_part = f'\n<!--fooocus-log-split--></body></html>'
 
     middle_part = log_cache.get(html_name, "")
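The metadata.copy() change above fixes an aliasing bug: per the commit message, the fooocus metadata scheme's parser rewrites list entries in place (stripping LoRA file extensions), and since the same list is subsequently rendered into the HTML history log, passing it by reference corrupted the log. A shallow copy suffices because the parser replaces whole tuples rather than mutating their contents. A self-contained sketch with a hypothetical parser (the real ones live in modules/meta_parser):

    # Aliasing sketch: an in-place parser mutates its argument, so the caller
    # must pass a copy to keep the original rows for the history log.
    def parse_string(metadata):  # hypothetical stand-in for the real parse_string
        for i, (label, key, value) in enumerate(metadata):
            if key.startswith('lora_combined_'):
                metadata[i] = (label, key, value.replace('.safetensors', ''))
        return str(metadata)

    rows = [('LoRA 1', 'lora_combined_1', 'sai_xl_canny.safetensors : 0.8')]

    parse_string(rows.copy())  # fixed behaviour: rows stays intact for the log
    assert rows[0][2] == 'sai_xl_canny.safetensors : 0.8'

    parse_string(rows)         # old behaviour: the log row loses its file extension
    assert rows[0][2] == 'sai_xl_canny : 0.8'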
From e241c53f0e20df66135625f8751dec58a4ca6cb5 Mon Sep 17 00:00:00 2001
From: Manuel Schmid <9307310+mashb1t@users.noreply.github.com>
Date: Sun, 3 Mar 2024 21:15:42 +0100
Subject: [PATCH 3/7] feat: adjust width of lora_weight for firefox (#2431)

---
 modules/html.py | 14 ++++----------
 webui.py        |  6 +++---
 2 files changed, 7 insertions(+), 13 deletions(-)

diff --git a/modules/html.py b/modules/html.py
index 47a1483a..769151a9 100644
--- a/modules/html.py
+++ b/modules/html.py
@@ -112,10 +112,6 @@ progress::after {
     margin-left: -5px !important;
 }
 
-.lora_enable {
-    flex-grow: 1 !important;
-}
-
 .lora_enable label {
     height: 100%;
 }
@@ -128,12 +124,10 @@ progress::after {
     display: none;
 }
 
-.lora_model {
-    flex-grow: 5 !important;
-}
-
-.lora_weight {
-    flex-grow: 5 !important;
+@-moz-document url-prefix() {
+    .lora_weight input[type=number] {
+        width: 80px;
+    }
 }
 
 '''
diff --git a/webui.py b/webui.py
index 180c7d2b..944f49b7 100644
--- a/webui.py
+++ b/webui.py
@@ -355,13 +355,13 @@ with shared.gradio_root:
                     for i, (n, v) in enumerate(modules.config.default_loras):
                         with gr.Row():
                             lora_enabled = gr.Checkbox(label='Enable', value=True,
-                                                       elem_classes=['lora_enable', 'min_check'])
+                                                       elem_classes=['lora_enable', 'min_check'], scale=1)
                             lora_model = gr.Dropdown(label=f'LoRA {i + 1}',
                                                      choices=['None'] + modules.config.lora_filenames, value=n,
-                                                     elem_classes='lora_model')
+                                                     elem_classes='lora_model', scale=5)
                             lora_weight = gr.Slider(label='Weight', minimum=modules.config.default_loras_min_weight,
                                                     maximum=modules.config.default_loras_max_weight, step=0.01, value=v,
-                                                    elem_classes='lora_weight')
+                                                    elem_classes='lora_weight', scale=5)
                             lora_ctrls += [lora_enabled, lora_model, lora_weight]
 
                     with gr.Row():

From e965bfc39caaef96a08f2198633a50815afb2b02 Mon Sep 17 00:00:00 2001
From: eddyizm
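A closing note on PATCH 3/7 above: it swaps the custom flex-grow CSS for Gradio's native scale argument (relative widths of components within a gr.Row) and keeps only one Firefox-specific rule, since @-moz-document url-prefix() matches exclusively in Firefox, apparently to keep the slider's number input at a usable width there. A minimal standalone sketch of the resulting row (labels, choices, and weight bounds are illustrative, not Fooocus's configured values):

    # Sketch of the LoRA row after PATCH 3/7: widths come from `scale`
    # (checkbox : dropdown : slider = 1 : 5 : 5) instead of flex-grow CSS.
    import gradio as gr

    with gr.Blocks() as demo:
        with gr.Row():
            lora_enabled = gr.Checkbox(label='Enable', value=True, scale=1)
            lora_model = gr.Dropdown(label='LoRA 1', choices=['None'], value='None', scale=5)
            lora_weight = gr.Slider(label='Weight', minimum=-2, maximum=2, step=0.01,
                                    value=1.0, scale=5)

    # demo.launch()  # serve locally to compare rendering in Firefox vs. Chromium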