Merge branch 'feature/add-pony-support-and-preset' into develop

# Conflicts:
#	launch.py
Manuel Schmid 2024-06-30 19:34:56 +02:00
commit 37fe0465b7
7 changed files with 74 additions and 4 deletions


@@ -98,7 +98,8 @@ if config.temp_path_cleanup_on_launch:
     else:
         print(f"[Cleanup] Failed to delete content of temp dir.")
-def download_models(default_model, previous_default_models, checkpoint_downloads, embeddings_downloads, lora_downloads):
+def download_models(default_model, previous_default_models, checkpoint_downloads, embeddings_downloads, lora_downloads, vae_downloads):
     for file_name, url in vae_approx_filenames:
         load_file_from_url(url=url, model_dir=config.path_vae_approx, file_name=file_name)
@@ -130,12 +131,14 @@ def download_models(default_model, previous_default_models, checkpoint_downloads
         load_file_from_url(url=url, model_dir=config.path_embeddings, file_name=file_name)
     for file_name, url in lora_downloads.items():
         load_file_from_url(url=url, model_dir=config.paths_loras[0], file_name=file_name)
+    for file_name, url in vae_downloads.items():
+        load_file_from_url(url=url, model_dir=config.path_vae, file_name=file_name)
     return default_model, checkpoint_downloads
 config.default_base_model_name, config.checkpoint_downloads = download_models(
     config.default_base_model_name, config.previous_default_models, config.checkpoint_downloads,
-    config.embeddings_downloads, config.lora_downloads)
+    config.embeddings_downloads, config.lora_downloads, config.vae_downloads)
 from webui import *
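Note: the new vae_downloads argument is a plain filename-to-URL mapping handled exactly like the existing embedding/LoRA downloads. A minimal standalone sketch of that step, assuming Fooocus's load_file_from_url helper lives in modules.model_loader (an assumption; the diff only shows the call) and using the entry from the pony_v6 preset below:

from modules import config
from modules.model_loader import load_file_from_url

# filename -> URL mapping, same shape as config.vae_downloads / a preset's "vae_downloads"
vae_downloads = {
    'ponyDiffusionV6XL_vae.safetensors':
        'https://huggingface.co/mashb1t/fav_models/resolve/main/fav/ponyDiffusionV6XL_vae.safetensors'
}

for file_name, url in vae_downloads.items():
    # download into the VAE folder; an already existing file is not downloaded again
    load_file_from_url(url=url, model_dir=config.path_vae, file_name=file_name)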


@@ -445,6 +445,12 @@ embeddings_downloads = get_config_item_or_set_default(
     validator=lambda x: isinstance(x, dict) and all(isinstance(k, str) and isinstance(v, str) for k, v in x.items()),
     expected_type=dict
 )
+vae_downloads = get_config_item_or_set_default(
+    key='vae_downloads',
+    default_value={},
+    validator=lambda x: isinstance(x, dict) and all(isinstance(k, str) and isinstance(v, str) for k, v in x.items()),
+    expected_type=dict
+)
 available_aspect_ratios = get_config_item_or_set_default(
     key='available_aspect_ratios',
     default_value=modules.flags.sdxl_aspect_ratios,
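Note: the validator only accepts a flat str-to-str mapping, so a non-string value makes the config loader fall back to the default {}. A quick standalone illustration of that check (hypothetical values, mirroring the lambda above):

def is_valid_downloads(x):
    # same condition as the validator lambda above
    return isinstance(x, dict) and all(isinstance(k, str) and isinstance(v, str) for k, v in x.items())

print(is_valid_downloads({'ponyDiffusionV6XL_vae.safetensors': 'https://example.com/vae.safetensors'}))  # True
print(is_valid_downloads({'vae.safetensors': 123}))  # False -> loader falls back to the default {}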

presets/.gitignore vendored

@@ -3,5 +3,6 @@
 !default.json
 !lcm.json
 !playground_v2.5.json
+!pony_v6.json
 !realistic.json
 !sai.json

presets/pony_v6.json Normal file

@@ -0,0 +1,53 @@
+{
+    "default_model": "ponyDiffusionV6XL.safetensors",
+    "default_refiner": "None",
+    "default_refiner_switch": 0.5,
+    "default_vae": "ponyDiffusionV6XL_vae.safetensors",
+    "default_loras": [
+        [
+            true,
+            "None",
+            1.0
+        ],
+        [
+            true,
+            "None",
+            1.0
+        ],
+        [
+            true,
+            "None",
+            1.0
+        ],
+        [
+            true,
+            "None",
+            1.0
+        ],
+        [
+            true,
+            "None",
+            1.0
+        ]
+    ],
+    "default_cfg_scale": 7.0,
+    "default_sample_sharpness": 2.0,
+    "default_sampler": "dpmpp_2m_sde_gpu",
+    "default_scheduler": "karras",
+    "default_performance": "Speed",
+    "default_prompt": "",
+    "default_prompt_negative": "",
+    "default_styles": [
+        "Fooocus Pony"
+    ],
+    "default_aspect_ratio": "896*1152",
+    "default_overwrite_step": -1,
+    "checkpoint_downloads": {
+        "ponyDiffusionV6XL.safetensors": "https://huggingface.co/mashb1t/fav_models/resolve/main/fav/ponyDiffusionV6XL.safetensors"
+    },
+    "embeddings_downloads": {},
+    "lora_downloads": {},
+    "vae_downloads": {
+        "ponyDiffusionV6XL_vae.safetensors": "https://huggingface.co/mashb1t/fav_models/resolve/main/fav/ponyDiffusionV6XL_vae.safetensors"
+    }
+}
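Note: once this file is in place, the preset can be chosen from the preset dropdown in the UI or selected at startup with the standard Fooocus preset argument, e.g. python launch.py --preset pony_v6 (the exact entry point may differ, e.g. entry_with_update.py).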

(Binary file not shown: new image added, 5.1 KiB.)


@@ -30,5 +30,10 @@
         "name": "Fooocus Cinematic",
         "prompt": "cinematic still {prompt} . emotional, harmonious, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy",
         "negative_prompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured"
-    }
+    },
+    {
+        "name": "Fooocus Pony",
+        "prompt": "score_9, score_8_up, score_7_up, {prompt}",
+        "negative_prompt": "score_6, score_5, score_4"
+    }
 ]
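Note: the {prompt} placeholder marks where the user's prompt is spliced into the style template. The helper below is a simplified, hypothetical stand-in for Fooocus's style handling, shown only to illustrate the effect with the new style:

def apply_style_template(positive_template, negative_template, prompt):
    # Replace the "{prompt}" placeholder with the user's prompt.
    return positive_template.replace('{prompt}', prompt), negative_template

positive, negative = apply_style_template(
    'score_9, score_8_up, score_7_up, {prompt}',
    'score_6, score_5, score_4',
    'astronaut riding a horse'
)
print(positive)  # score_9, score_8_up, score_7_up, astronaut riding a horse
print(negative)  # score_6, score_5, score_4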


@@ -886,9 +886,11 @@ with shared.gradio_root:
         checkpoint_downloads = preset_prepared.get('checkpoint_downloads', {})
         embeddings_downloads = preset_prepared.get('embeddings_downloads', {})
         lora_downloads = preset_prepared.get('lora_downloads', {})
+        vae_downloads = preset_prepared.get('vae_downloads', {})
-        preset_prepared['base_model'], preset_prepared['lora_downloads'] = launch.download_models(
-            default_model, previous_default_models, checkpoint_downloads, embeddings_downloads, lora_downloads)
+        preset_prepared['base_model'], preset_prepared['checkpoint_downloads'] = launch.download_models(
+            default_model, previous_default_models, checkpoint_downloads, embeddings_downloads, lora_downloads,
+            vae_downloads)
         if 'prompt' in preset_prepared and preset_prepared.get('prompt') == '':
             del preset_prepared['prompt']
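Note: presets that predate this change simply have no vae_downloads key, so the .get('vae_downloads', {}) default keeps them loading unchanged. A small standalone sketch of that extraction step (hypothetical minimal preset contents):

import json

# Hypothetical older preset without a "vae_downloads" entry.
preset_prepared = json.loads('{"default_model": "model.safetensors", "checkpoint_downloads": {}}')

checkpoint_downloads = preset_prepared.get('checkpoint_downloads', {})
vae_downloads = preset_prepared.get('vae_downloads', {})  # -> {} for older presets

print(checkpoint_downloads, vae_downloads)  # {} {}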