Merge branch 'main_upstream' into develop_upstream
commit cb24c686b0
@@ -23,7 +23,7 @@ RUN chown -R user:user /content
 WORKDIR /content
 USER user

-COPY . /content/app
+COPY --chown=user:user . /content/app
 RUN mv /content/app/models /content/app/models.org

 CMD [ "sh", "-c", "/content/entrypoint.sh ${CMDARGS}" ]
@@ -74,31 +74,35 @@ progress::after {
     text-align: right;
     width: 215px;
 }

-.type_row{
-    height: 80px !important;
-}
-
-.type_row_half{
-    height: 32px !important;
-}
-
-.scroll-hide{
-    resize: none !important;
+div:has(> #positive_prompt) {
+    border: none;
+}
+
+#positive_prompt {
+    padding: 1px;
+    background: var(--background-fill-primary);
+}
+
+.type_row {
+    height: 84px !important;
+}
+
+.type_row_half {
+    height: 34px !important;
 }

-.refresh_button{
+.refresh_button {
     border: none !important;
     background: none !important;
     font-size: none !important;
     box-shadow: none !important;
 }

-.advanced_check_row{
+.advanced_check_row {
     width: 250px !important;
 }

-.min_check{
+.min_check {
     min-width: min(1px, 100%) !important;
 }
@@ -107,8 +107,7 @@ class SDTurboScheduler:
     def get_sigmas(self, model, steps, denoise):
         start_step = 10 - int(10 * denoise)
         timesteps = torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[start_step:start_step + steps]
-        ldm_patched.modules.model_management.load_models_gpu([model])
-        sigmas = model.model.model_sampling.sigma(timesteps)
+        sigmas = model.model_sampling.sigma(timesteps)
         sigmas = torch.cat([sigmas, sigmas.new_zeros([1])])
         return (sigmas, )
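Note on the hunk above: the merged `get_sigmas` drops the explicit `load_models_gpu` call and reads `model.model_sampling` directly instead of `model.model.model_sampling`. The timestep selection itself is unchanged; as a minimal sketch in plain PyTorch (no ldm_patched imports; the helper name `turbo_timesteps` is ours), it behaves like this:

```python
import torch

def turbo_timesteps(steps: int, denoise: float) -> torch.Tensor:
    # Same selection as in SDTurboScheduler.get_sigmas: 10 evenly spaced discrete
    # timesteps (999, 899, ..., 99); `denoise` shifts where sampling starts.
    start_step = 10 - int(10 * denoise)
    return torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[start_step:start_step + steps]

print(turbo_timesteps(steps=1, denoise=1.0))  # tensor([999])
print(turbo_timesteps(steps=4, denoise=0.5))  # tensor([499, 399, 299, 199])
```

The full method then converts these timesteps to sigmas via `model.model_sampling.sigma(timesteps)` and appends a final zero sigma.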
@@ -175,7 +175,7 @@ def calculate_sigmas_scheduler_hacked(model, scheduler_name, steps):
     elif scheduler_name == "sgm_uniform":
         sigmas = normal_scheduler(model, steps, sgm=True)
     elif scheduler_name == "turbo":
-        sigmas = SDTurboScheduler().get_sigmas(namedtuple('Patcher', ['model'])(model=model), steps=steps, denoise=1.0)[0]
+        sigmas = SDTurboScheduler().get_sigmas(model=model, steps=steps, denoise=1.0)[0]
     elif scheduler_name == "align_your_steps":
         model_type = 'SDXL' if isinstance(model.latent_format, ldm_patched.modules.latent_formats.SDXL) else 'SD1'
         sigmas = AlignYourStepsScheduler().get_sigmas(model_type=model_type, steps=steps, denoise=1.0)[0]
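With `get_sigmas` now reading `model.model_sampling` directly, the turbo branch no longer needs to wrap the model in an ad-hoc `Patcher` namedtuple. A minimal sketch of the attribute paths involved, using dummy stand-ins (`FakeModel` and `FakeModelSampling` are illustrative only, not classes from the codebase):

```python
from collections import namedtuple

class FakeModelSampling:
    def sigma(self, timesteps):
        return timesteps  # placeholder; the real mapping lives in ldm_patched

class FakeModel:
    model_sampling = FakeModelSampling()

model = FakeModel()

# Old call site: get_sigmas() dereferenced model.model.model_sampling, so the
# bare model had to be wrapped in a throwaway object exposing a .model field.
Patcher = namedtuple('Patcher', ['model'])
assert Patcher(model=model).model.model_sampling is model.model_sampling

# New call site: get_sigmas() dereferences model.model_sampling directly,
# so the model is passed through unchanged: get_sigmas(model=model, ...).
```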
readme.md (27 changed lines)
@@ -370,25 +370,36 @@ entry_with_update.py [-h] [--listen [IP]] [--port PORT]
 [--web-upload-size WEB_UPLOAD_SIZE]
 [--hf-mirror HF_MIRROR]
 [--external-working-path PATH [PATH ...]]
-[--output-path OUTPUT_PATH] [--temp-path TEMP_PATH]
+[--output-path OUTPUT_PATH]
+[--temp-path TEMP_PATH]
 [--cache-path CACHE_PATH] [--in-browser]
-[--disable-in-browser] [--gpu-device-id DEVICE_ID]
+[--disable-in-browser]
+[--gpu-device-id DEVICE_ID]
 [--async-cuda-allocation | --disable-async-cuda-allocation]
-[--disable-attention-upcast] [--all-in-fp32 | --all-in-fp16]
+[--disable-attention-upcast]
+[--all-in-fp32 | --all-in-fp16]
 [--unet-in-bf16 | --unet-in-fp16 | --unet-in-fp8-e4m3fn | --unet-in-fp8-e5m2]
 [--vae-in-fp16 | --vae-in-fp32 | --vae-in-bf16]
+[--vae-in-cpu]
 [--clip-in-fp8-e4m3fn | --clip-in-fp8-e5m2 | --clip-in-fp16 | --clip-in-fp32]
-[--directml [DIRECTML_DEVICE]] [--disable-ipex-hijack]
+[--directml [DIRECTML_DEVICE]]
+[--disable-ipex-hijack]
 [--preview-option [none,auto,fast,taesd]]
 [--attention-split | --attention-quad | --attention-pytorch]
 [--disable-xformers]
 [--always-gpu | --always-high-vram | --always-normal-vram |
  --always-low-vram | --always-no-vram | --always-cpu [CPU_NUM_THREADS]]
-[--always-offload-from-vram] [--disable-server-log]
+[--always-offload-from-vram]
+[--pytorch-deterministic] [--disable-server-log]
 [--debug-mode] [--is-windows-embedded-python]
-[--disable-server-info] [--share] [--preset PRESET]
-[--language LANGUAGE] [--disable-offload-from-vram]
-[--theme THEME] [--disable-image-log]
+[--disable-server-info] [--multi-user] [--share]
+[--preset PRESET] [--disable-preset-selection]
+[--language LANGUAGE]
+[--disable-offload-from-vram] [--theme THEME]
+[--disable-image-log] [--disable-analytics]
+[--disable-metadata] [--disable-preset-download]
+[--enable-describe-uov-image]
+[--always-download-new-model]
 ```

 ## Advanced Features
webui.py (4 changed lines)
@@ -112,10 +112,10 @@ with shared.gradio_root:
             gallery = gr.Gallery(label='Gallery', show_label=False, object_fit='contain', visible=True, height=768,
                                  elem_classes=['resizable_area', 'main_view', 'final_gallery', 'image_gallery'],
                                  elem_id='final_gallery')
-            with gr.Row(elem_classes='type_row'):
+            with gr.Row():
                 with gr.Column(scale=17):
                     prompt = gr.Textbox(show_label=False, placeholder="Type prompt here or paste parameters.", elem_id='positive_prompt',
-                                        container=False, autofocus=True, elem_classes='type_row', lines=1024)
+                                        autofocus=True, lines=3)

                     default_prompt = modules.config.default_prompt
                     if isinstance(default_prompt, str) and default_prompt != '':
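With the `type_row` CSS clamp gone (see the stylesheet hunk above), the prompt textbox now takes its height from Gradio's `lines=3`. A minimal, self-contained sketch of just this row (`demo` is a stand-in for the project's `shared.gradio_root`; the real webui.py builds a much larger layout around it):

```python
import gradio as gr

with gr.Blocks() as demo:  # stand-in for shared.gradio_root
    with gr.Row():
        with gr.Column(scale=17):
            # Height comes from lines=3 rather than a .type_row CSS rule.
            prompt = gr.Textbox(show_label=False,
                                placeholder="Type prompt here or paste parameters.",
                                elem_id='positive_prompt',
                                autofocus=True, lines=3)

if __name__ == '__main__':
    demo.launch()
```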