Added feature Download, Delete, and No Base Model

This commit is contained in:
nabilaba 2025-06-09 13:26:08 +07:00
parent 59f183ab9b
commit 475527f0ee
5 changed files with 168 additions and 63 deletions

View File

@ -267,7 +267,7 @@ temp_path_cleanup_on_launch = get_config_item_or_set_default(
)
# 'None' means Fooocus starts with no base model loaded; the user can pick or
# download one later from the UI. (Previously defaulted to a concrete file.)
default_base_model_name = default_model = get_config_item_or_set_default(
    key='default_model',
    default_value='None',
    validator=lambda x: isinstance(x, str),
    expected_type=str
)

View File

@ -46,13 +46,12 @@ def refresh_controlnets(model_paths):
@torch.no_grad()
@torch.inference_mode()
def assert_model_integrity():
    """Verify that the loaded base model is an SDXL checkpoint.

    Returns True when the model is valid or when no base model is loaded
    (the 'None' selection); raises NotImplementedError for non-SDXL models.
    """
    # No base model selected: nothing to validate, generation paths that need
    # a base model guard themselves separately.
    if model_base.unet_with_lora is None or not hasattr(model_base.unet_with_lora, "model"):
        print('[Info] Skipping model integrity check: base model is not loaded.')
        return True
    if not isinstance(model_base.unet_with_lora.model, SDXL):
        raise NotImplementedError('You have selected base model other than SDXL. This is not supported yet.')
    return True
@ -62,6 +61,11 @@ def assert_model_integrity():
def refresh_base_model(name, vae_name=None):
global model_base
if name is None or name == 'None':
print('[Info] No base model loaded.')
model_base = core.StableDiffusionModel()
return
filename = get_file_from_folder_list(name, modules.config.paths_checkpoints)
vae_filename = None
@ -215,17 +219,34 @@ def set_clip_skip(clip_skip: int):
@torch.no_grad()
@torch.inference_mode()
def clear_all_caches():
    """Reset the conditioning cache on the loaded CLIP model, if one exists.

    When no base model is loaded, final_clip is None and there is nothing
    to clear, so this is a logged no-op instead of an AttributeError.
    """
    global final_clip
    if final_clip is not None and hasattr(final_clip, "fcs_cond_cache"):
        final_clip.fcs_cond_cache = {}
    else:
        print("[Info] Skipping cache clear: final_clip is None.")
@torch.no_grad()
@torch.inference_mode()
def prepare_text_encoder(async_call=True):
    """Load the text-encoder-related models (CLIP, prompt expansion) onto the GPU.

    Tolerates a missing base model: only models that are actually present
    are pushed to the GPU; with none present this is a logged no-op.
    """
    if async_call:
        # TODO: make sure that this is always called in an async way so that users cannot feel it.
        pass
    assert_model_integrity()
    # Collect only the patchers that actually exist; either model may be
    # absent when no base model is loaded.
    patchers = []
    if final_clip is not None and hasattr(final_clip, "patcher"):
        patchers.append(final_clip.patcher)
    if final_expansion is not None and hasattr(final_expansion, "patcher"):
        patchers.append(final_expansion.patcher)
    if patchers:
        ldm_patched.modules.model_management.load_models_gpu(patchers)
    else:
        print("[Info] No models to load into GPU (no base model).")
    return

View File

@ -103,7 +103,7 @@ sdxl_aspect_ratios = [
'896*1152', '896*1088', '960*1088', '960*1024', '1024*1024', '1024*960',
'1088*960', '1088*896', '1152*896', '1152*832', '1216*832', '1280*768',
'1344*768', '1344*704', '1408*704', '1472*704', '1536*640', '1600*640',
'1664*576', '1728*576'
'1664*576', '1728*576', '1080*1920', '1920*1080'
]

View File

@ -1,60 +1,13 @@
{
"default_model": "juggernautXL_v8Rundiffusion.safetensors",
"default_refiner": "None",
"default_refiner_switch": 0.5,
"default_loras": [
[
true,
"sd_xl_offset_example-lora_1.0.safetensors",
0.1
],
[
true,
"None",
1.0
],
[
true,
"None",
1.0
],
[
true,
"None",
1.0
],
[
true,
"None",
1.0
]
],
"default_cfg_scale": 4.0,
"default_sample_sharpness": 2.0,
"default_sampler": "dpmpp_2m_sde_gpu",
"default_scheduler": "karras",
"default_performance": "Speed",
"default_performance": "Quality",
"default_prompt": "",
"default_prompt_negative": "",
"default_styles": [
"Fooocus V2",
"Fooocus Enhance",
"Fooocus Sharp"
],
"default_aspect_ratio": "1152*896",
"default_overwrite_step": -1,
"checkpoint_downloads": {
"juggernautXL_v8Rundiffusion.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/juggernautXL_v8Rundiffusion.safetensors"
},
"embeddings_downloads": {},
"lora_downloads": {
"sd_xl_offset_example-lora_1.0.safetensors": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors"
},
"previous_default_models": [
"juggernautXL_version8Rundiffusion.safetensors",
"juggernautXL_version7Rundiffusion.safetensors",
"juggernautXL_v7Rundiffusion.safetensors",
"juggernautXL_version6Rundiffusion.safetensors",
"juggernautXL_v6Rundiffusion.safetensors"
]
"default_styles": [],
"default_aspect_ratio": "1080*1920",
"default_overwrite_step": -1
}

135
webui.py
View File

@ -24,6 +24,12 @@ from modules.ui_gradio_extensions import reload_javascript
from modules.auth import auth_enabled, check_auth
from modules.util import is_json
import os
import shutil
import requests
from urllib.parse import urlparse, unquote
from modules.model_loader import load_file_from_url
def get_task(*args):
args = list(args)
args.pop(0)
@ -654,7 +660,7 @@ with shared.gradio_root:
with gr.Tab(label='Models'):
with gr.Group():
with gr.Row():
base_model = gr.Dropdown(label='Base Model (SDXL only)', choices=modules.config.model_filenames, value=modules.config.default_base_model_name, show_label=True)
base_model = gr.Dropdown(label='Base Model (SDXL only)', choices=['None'] + modules.config.model_filenames, value=modules.config.default_base_model_name, show_label=True)
refiner_model = gr.Dropdown(label='Refiner (SDXL or SD 1.5)', choices=['None'] + modules.config.model_filenames, value=modules.config.default_refiner_model_name, show_label=True)
refiner_switch = gr.Slider(label='Refiner Switch At', minimum=0.1, maximum=1.0, step=0.0001,
@ -886,6 +892,131 @@ with shared.gradio_root:
refresh_files.click(refresh_files_clicked, [], refresh_files_output + lora_ctrls,
queue=False, show_progress=False)
with gr.Tab(label='Others'):
with gr.Column():
with gr.Tab(label='Download'):
file_input_path = gr.Textbox(
label='File Path or URL',
placeholder='Enter full path to file or downloadable URL',
lines=1
)
destination_folder = gr.Dropdown(
label='Target Folder',
choices=[
modules.config.paths_checkpoints[0],
modules.config.paths_loras[0],
modules.config.path_embeddings,
modules.config.path_vae,
modules.config.path_outputs
],
value=modules.config.paths_checkpoints[0]
)
download_result_text = gr.Textbox(label='Download Status', interactive=False)
download_file_button = gr.Button(value='\U00002B07 Download', variant='secondary', elem_classes='refresh_button')
def perform_download(file_url_or_path, target_directory):
    """Download a URL, or copy a local file, into target_directory.

    Returns a human-readable status string shown in the UI textbox;
    never raises (all failures are reported as a status message).
    """
    try:
        # Gradio dropdowns may deliver a (label, value) tuple; keep the value.
        if isinstance(target_directory, tuple):
            target_directory = target_directory[1]
        if not file_url_or_path or not file_url_or_path.strip():
            return "\U0000274C Error: File not found or invalid input."
        file_url_or_path = file_url_or_path.strip()
        if file_url_or_path.startswith(('http://', 'https://')):
            # Only inspect the response headers to learn the server-suggested
            # filename; the actual download is delegated to load_file_from_url.
            response = requests.get(file_url_or_path, stream=True, timeout=30)
            try:
                response.raise_for_status()
                # Prefer the filename from the Content-Disposition header if present.
                content_disposition = response.headers.get('Content-Disposition', '')
                if 'filename=' in content_disposition:
                    filename = content_disposition.split('filename=')[-1].strip('"')
                else:
                    # Fallback: derive the name from the URL path.
                    parsed_url = urlparse(file_url_or_path)
                    filename = unquote(os.path.basename(parsed_url.path))
            finally:
                # Close without reading the body: avoids downloading twice.
                response.close()
            # Guard against path traversal via a crafted server-sent filename.
            filename = os.path.basename(filename)
            downloaded_path = load_file_from_url(
                file_url_or_path,
                model_dir=target_directory,
                progress=True,
                file_name=filename
            )
            return f"\U00002705 Downloaded to: {downloaded_path}"
        if os.path.isfile(file_url_or_path):
            filename = os.path.basename(file_url_or_path)
            destination_path = os.path.join(target_directory, filename)
            shutil.copy(file_url_or_path, destination_path)
            return f"\U00002705 Copied to: {destination_path}"
        return "\U0000274C Error: File not found or invalid input."
    except Exception as e:
        # Surface any failure as a status message instead of crashing the UI.
        return f"\U0000274C Failed: {str(e)}"
download_file_button.click(
fn=perform_download,
inputs=[file_input_path, destination_folder],
outputs=[download_result_text]
)
with gr.Tab(label='Delete'):
delete_folder_dropdown = gr.Dropdown(
label='Select Folder',
choices=[
modules.config.paths_checkpoints[0],
modules.config.paths_loras[0],
modules.config.path_embeddings,
modules.config.path_vae,
modules.config.path_outputs
],
value=modules.config.paths_checkpoints[0]
)
file_list_dropdown = gr.Dropdown(label="Select File to Delete", choices=[], multiselect=True)
delete_button = gr.Button(value='\U0001F5D1 Delete Selected File(s)', variant='stop')
delete_status = gr.Textbox(visible=True, interactive=False, label="Delete Status")
def update_file_list(folder):
    """Return a dropdown update listing the plain files inside *folder*.

    The current selection is cleared so stale choices cannot linger after
    the folder changes. Unreadable/missing folders yield an empty list.
    """
    try:
        # Sort for a stable, predictable listing (os.listdir order is arbitrary).
        files = sorted(f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f)))
        return gr.update(choices=files, value=[])
    except Exception:
        # Folder missing or unreadable: present an empty list instead of crashing.
        return gr.update(choices=[], value=[])
def delete_selected_files(folder, selected_files):
    """Delete the chosen files from *folder*.

    Returns (status message, dropdown update with the refreshed file list).
    """
    deleted = []
    errors = []
    # Gradio can pass None for an empty multiselect; treat it as "nothing chosen".
    for fname in selected_files or []:
        try:
            # basename() guards against path traversal in API-supplied names.
            file_path = os.path.join(folder, os.path.basename(fname))
            if os.path.isfile(file_path):
                os.remove(file_path)
                deleted.append(fname)
            else:
                errors.append(fname)
        except Exception as e:
            errors.append(f"{fname} (error: {e})")
    if not deleted and not errors:
        status = "⚠️ No files selected."
    else:
        status = ""
        if deleted:
            status += f"✅ Deleted: {', '.join(deleted)}. "
        if errors:
            status += f"❌ Failed: {', '.join(errors)}"
    # Refresh the listing so deleted entries disappear immediately.
    try:
        files = sorted(f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f)))
    except Exception:
        files = []
    return status.strip(), gr.update(choices=files, value=[])
delete_folder_dropdown.change(update_file_list, inputs=[delete_folder_dropdown], outputs=[file_list_dropdown])
delete_button.click(delete_selected_files, inputs=[delete_folder_dropdown, file_list_dropdown], outputs=[delete_status, file_list_dropdown])
state_is_generating = gr.State(False)
load_data_outputs = [advanced_checkbox, image_number, prompt, negative_prompt, style_selections,
@ -1125,4 +1256,4 @@ shared.gradio_root.launch(
auth=check_auth if (args_manager.args.share or args_manager.args.listen) and auth_enabled else None,
allowed_paths=[modules.config.path_outputs],
blocked_paths=[constants.AUTH_FILENAME]
)
)