add LoRA handling
parent f56e3eb3b0
commit 891a1acb62

@@ -350,7 +350,7 @@ possible_preset_keys = {
     "default_model": "Base Model",
     "default_refiner": "Refiner Model",
     "default_refiner_switch": "Refiner Switch",
-    "default_loras": "TODO",
+    "default_loras": "<processed>",
     "default_cfg_scale": "Guidance Scale",
     "default_sample_sharpness": "Sharpness",
     "default_sampler": "Sampler",
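
For context, possible_preset_keys maps preset JSON keys to the human-readable field names used when parsing metadata. The value of "default_loras" in a preset is a list rather than a single scalar, so it cannot be copied across under one label; the "<processed>" placeholder marks it for the special handling added in the second hunk. A rough sketch of what such a preset entry might look like (the filenames, weights, and exact entry shape are illustrative assumptions, not taken from the change):

# Hypothetical preset excerpt: "default_loras" holds a list of entries,
# so it gets the "<processed>" marker instead of a one-to-one field name.
preset_content = {
    "default_model": "some_base_model.safetensors",   # placeholder name
    "default_loras": [
        ["example-lora_1.0.safetensors", 0.1],         # assumed [name, weight] shape
        ["None", 1.0],
    ],
    "default_cfg_scale": 4.0,
}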

@@ -150,6 +150,12 @@ def parse_meta_from_preset(preset_content):
     for k, v in preset_content.items():
         if k in modules.config.possible_preset_keys:
             if modules.config.possible_preset_keys[k] is not None:
-                preset_prepared[modules.config.possible_preset_keys[k]] = v
+                if k != "default_loras":
+                    preset_prepared[modules.config.possible_preset_keys[k]] = v
+                else:
+                    lora_count = 1
+                    for lora_value in v[:5]:
+                        preset_prepared[f'LoRA {lora_count}'] = ' : '.join(map(str, lora_value))
+                        lora_count += 1
 
     return load_parameter_button_click(json.dumps(preset_prepared))
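
The effect of the new "default_loras" branch can be shown in isolation. A minimal sketch, assuming each entry is a [name, weight] pair; flatten_loras is an illustrative helper, not part of the change:

# Standalone sketch of the LoRA flattening added above: only the first five
# entries are kept, and each entry is joined into one "name : weight" string.
def flatten_loras(loras):
    prepared = {}
    lora_count = 1
    for lora_value in loras[:5]:
        prepared[f'LoRA {lora_count}'] = ' : '.join(map(str, lora_value))
        lora_count += 1
    return prepared

# Example with two entries from a hypothetical preset:
print(flatten_loras([
    ["example-lora_1.0.safetensors", 0.1],
    ["None", 1.0],
]))
# -> {'LoRA 1': 'example-lora_1.0.safetensors : 0.1', 'LoRA 2': 'None : 1.0'}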