lllyasviel 2023-08-12 23:28:44 -07:00 committed by GitHub
parent 8720e435f5
commit 59aa2aedeb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 33 additions and 5 deletions

View File

@@ -1 +1 @@
-version = '1.0.20'
+version = '1.0.21'

View File

@@ -27,6 +27,7 @@ def worker():
     pipeline.refresh_base_model(base_model_name)
     pipeline.refresh_refiner_model(refiner_model_name)
     pipeline.refresh_loras(loras)
+    pipeline.clean_prompt_cond_caches()
     p_txt, n_txt = apply_style(style_selction, prompt, negative_prompt)
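The ordering of the added call matters: refreshing the base model, refiner, or LoRAs can swap out the CLIP text encoder, so conditions cached during a previous task would no longer match the freshly loaded weights. Below is a minimal standalone sketch of that ordering; FakeClip, FakePipeline, and encode_once are hypothetical stand-ins, not the real Fooocus modules.

# Hypothetical stand-ins; only the call ordering mirrors the hunk above.
class FakeClip:
    def __init__(self, model_name):
        self.model_name = model_name

    def encode(self, prompt):
        # A real encoder returns conditioning tensors; a tuple is enough here.
        return (self.model_name, prompt)


class FakePipeline:
    def __init__(self):
        self.clip = FakeClip('base-v1')
        self.cond_cache = None

    def refresh_base_model(self, model_name):
        # A new checkpoint brings a new text encoder, so cached conditions go stale.
        self.clip = FakeClip(model_name)

    def clean_prompt_cond_caches(self):
        self.cond_cache = None

    def encode_once(self, prompt):
        if self.cond_cache is None:
            self.cond_cache = self.clip.encode(prompt)
        return self.cond_cache


pipeline = FakePipeline()
pipeline.encode_once('a cat')           # fills the cache with 'base-v1' conditions
pipeline.refresh_base_model('base-v2')  # same order as the worker: refresh first,
pipeline.clean_prompt_cond_caches()     # then clear, so stale conditions cannot leak in
print(pipeline.encode_once('a cat'))    # ('base-v2', 'a cat')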

View File

@@ -102,18 +102,41 @@ refresh_base_model(modules.path.default_base_model_name)
 refresh_refiner_model(modules.path.default_refiner_model_name)
 refresh_loras([(modules.path.default_lora_name, 0.5), ('None', 0.5), ('None', 0.5), ('None', 0.5), ('None', 0.5)])
+positive_conditions_cache = None
+negative_conditions_cache = None
+positive_conditions_refiner_cache = None
+negative_conditions_refiner_cache = None
+def clean_prompt_cond_caches():
+    global positive_conditions_cache, negative_conditions_cache, \
+        positive_conditions_refiner_cache, negative_conditions_refiner_cache
+    positive_conditions_cache = None
+    negative_conditions_cache = None
+    positive_conditions_refiner_cache = None
+    negative_conditions_refiner_cache = None
+    return
 @torch.no_grad()
 def process(positive_prompt, negative_prompt, steps, switch, width, height, image_seed, callback):
-    positive_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=positive_prompt)
-    negative_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=negative_prompt)
+    global positive_conditions_cache, negative_conditions_cache, \
+        positive_conditions_refiner_cache, negative_conditions_refiner_cache
+    positive_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=positive_prompt) if positive_conditions_cache is None else positive_conditions_cache
+    negative_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=negative_prompt) if negative_conditions_cache is None else negative_conditions_cache
+    positive_conditions_cache = positive_conditions
+    negative_conditions_cache = negative_conditions
     empty_latent = core.generate_empty_latent(width=width, height=height, batch_size=1)
     if xl_refiner is not None:
-        positive_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=positive_prompt)
-        negative_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=negative_prompt)
+        positive_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=positive_prompt) if positive_conditions_refiner_cache is None else positive_conditions_refiner_cache
+        negative_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=negative_prompt) if negative_conditions_refiner_cache is None else negative_conditions_refiner_cache
+        positive_conditions_refiner_cache = positive_conditions_refiner
+        negative_conditions_refiner_cache = negative_conditions_refiner
         sampled_latent = core.ksampler_with_refiner(
             model=xl_base_patched.unet,
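The rewritten process() is an encode-once cache: each condition is computed only when its cache slot is None, then stored back, and the slots are not keyed by prompt text because worker() clears them at the start of every task. A small self-contained sketch of the same idiom, where encode() and the single cache slot are hypothetical stand-ins for core.encode_prompt_condition and the module-level caches:

encode_calls = 0

def encode(prompt):
    # Stand-in for the expensive CLIP text-encoding step.
    global encode_calls
    encode_calls += 1
    return 'cond(' + prompt + ')'

positive_conditions_cache = None

def get_positive_conditions(prompt):
    global positive_conditions_cache
    # Same shape as the diff: encode only when the cache is empty, then keep the result.
    positive_conditions = encode(prompt) if positive_conditions_cache is None else positive_conditions_cache
    positive_conditions_cache = positive_conditions
    return positive_conditions

get_positive_conditions('a cat')   # runs the encoder
get_positive_conditions('a cat')   # served from the cache
print(encode_calls)                # 1

Because each cache is a single slot rather than a dict keyed by prompt, it stays correct only as long as clean_prompt_cond_caches() runs before each new task, which is exactly what the worker() hunk above adds.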

View File

@@ -1,5 +1,9 @@
+### 1.0.21
+* Speed-up text encoder
 ### 1.0.20
 * Re-write UI to use async codes: (1) for faster start, and (2) for better live preview.
 * Removed opencv dependency
 * Plan to support Linux soon