fix: replace 37 bare except clauses with except Exception

This commit is contained in:
haosenwang1018 2026-02-27 01:08:22 +00:00
parent ae05379cc9
commit fd72aa1ad4
14 changed files with 37 additions and 37 deletions

View File

@@ -50,7 +50,7 @@ class FreeU:
if hsp.device not in on_cpu_devices:
try:
hsp = Fourier_filter(hsp, threshold=1, scale=scale[1])
except:
except Exception:
print("Device", hsp.device, "does not support the torch.fft functions used in the FreeU node, switching to CPU.")
on_cpu_devices[hsp.device] = True
hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)
@@ -96,7 +96,7 @@ class FreeU_V2:
if hsp.device not in on_cpu_devices:
try:
hsp = Fourier_filter(hsp, threshold=1, scale=scale[1])
except:
except Exception:
print("Device", hsp.device, "does not support the torch.fft functions used in the FreeU node, switching to CPU.")
on_cpu_devices[hsp.device] = True
hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)

View File

@@ -33,7 +33,7 @@ def load_hypernetwork_patch(path, strength):
for d in sd:
try:
dim = int(d)
except:
except Exception:
continue
output = []

View File

@@ -279,7 +279,7 @@ try:
x_vers = xformers.__version__
#I think 0.0.23 is also broken (q with bs bigger than 65535 gives CUDA error)
BROKEN_XFORMERS = x_vers.startswith("0.0.21") or x_vers.startswith("0.0.22") or x_vers.startswith("0.0.23")
except:
except Exception:
pass
def attention_xformers(q, k, v, heads, mask=None):

View File

@@ -358,7 +358,7 @@ def apply_control(h, control, name):
if ctrl is not None:
try:
h += ctrl
except:
except Exception:
print("warning control could not be applied", h.shape, ctrl.shape)
return h

View File

@@ -288,7 +288,7 @@ class ControlLora(ControlNet):
weight = sd[k]
try:
ldm_patched.modules.utils.set_attr(self.control_model, k, weight)
except:
except Exception:
pass
for k in self.control_weights:

View File

@@ -49,14 +49,14 @@ try:
import intel_extension_for_pytorch as ipex
if torch.xpu.is_available():
xpu_available = True
except:
except Exception:
pass
try:
if torch.backends.mps.is_available():
cpu_state = CPUState.MPS
import torch.mps
except:
except Exception:
pass
if args.always_cpu:
@@ -128,7 +128,7 @@ if not args.always_normal_vram and not args.always_cpu:
try:
OOM_EXCEPTION = torch.cuda.OutOfMemoryError
except:
except Exception:
OOM_EXCEPTION = Exception
XFORMERS_VERSION = ""
@@ -142,7 +142,7 @@ else:
XFORMERS_IS_AVAILABLE = True
try:
XFORMERS_IS_AVAILABLE = xformers._has_cpp_library
except:
except Exception:
pass
try:
XFORMERS_VERSION = xformers.version.__version__
@@ -153,9 +153,9 @@ else:
print("Please downgrade or upgrade xformers to a different version.")
print()
XFORMERS_ENABLED_VAE = False
except:
except Exception:
pass
except:
except Exception:
XFORMERS_IS_AVAILABLE = False
def is_nvidia():
@@ -183,7 +183,7 @@ try:
if is_intel_xpu():
if args.attention_split == False and args.attention_quad == False:
ENABLE_PYTORCH_ATTENTION = True
except:
except Exception:
pass
if is_intel_xpu():
@@ -246,7 +246,7 @@ def get_torch_device_name(device):
if device.type == "cuda":
try:
allocator_backend = torch.cuda.get_allocator_backend()
except:
except Exception:
allocator_backend = ""
return "{} {} : {}".format(device, torch.cuda.get_device_name(device), allocator_backend)
else:
@@ -258,7 +258,7 @@ def get_torch_device_name(device):
try:
print("Device:", get_torch_device_name(get_torch_device()))
except:
except Exception:
print("Could not pick default device.")
print("VAE dtype:", VAE_DTYPE)

View File

@@ -237,7 +237,7 @@ def token_weights(string, current_weight):
try:
weight = float(x[xx+1:])
x = x[:xx]
except:
except Exception:
pass
out += token_weights(x, weight)
else:
@@ -294,7 +294,7 @@ def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=No
try:
if os.path.commonpath((embed_dir, embed_path)) != embed_dir:
continue
except:
except Exception:
continue
if not os.path.isfile(embed_path):
extensions = ['.safetensors', '.pt', '.bin']
@@ -323,7 +323,7 @@ def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=No
if 'weights_only' in torch.load.__code__.co_varnames:
try:
embed = torch.load(embed_path, weights_only=True, map_location="cpu")
except:
except Exception:
embed_out = safe_load_embed_zip(embed_path)
else:
embed = torch.load(embed_path, map_location="cpu", weights_only=True)

View File

@@ -620,7 +620,7 @@ class CodeFormer(VQAutoEncoder):
n_layers = len(
set([x.split(".")[1] for x in state_dict.keys() if "ft_layers" in x])
)
except:
except Exception:
pass
codebook_size = state_dict["quantize.embedding.weight"].shape[0]

View File

@@ -93,7 +93,7 @@ def load_state_dict(state_dict) -> PyTorchModel:
else:
try:
model = ESRGAN(state_dict)
except:
except Exception:
# pylint: disable=raise-missing-from
raise UnsupportedModel
return model

View File

@@ -43,7 +43,7 @@ filename_list_cache = {}
if not os.path.exists(input_directory):
try:
pass # os.makedirs(input_directory)
except:
except Exception:
print("Failed to create input directory")
def set_output_directory(output_dir):
@@ -228,7 +228,7 @@ def get_save_image_path(filename_prefix, output_dir, image_width=0, image_height
prefix = filename[:prefix_len + 1]
try:
digits = int(filename[prefix_len + 1:].split('_')[0])
except:
except Exception:
digits = 0
return (digits, prefix)

View File

@@ -1473,7 +1473,7 @@ def worker():
build_image_wall(task)
task.yields.append(['finish', task.results])
pipeline.prepare_text_encoder(async_call=True)
except:
except Exception:
traceback.print_exc()
task.yields.append(['finish', task.results])
finally:

View File

@@ -37,5 +37,5 @@ def try_eval_env_var(value: str, expected_type=None):
if expected_type is not None and not isinstance(value_eval, expected_type):
return value
return value_eval
except:
except Exception:
return value

View File

@@ -78,7 +78,7 @@ def get_str(key: str, fallback: str | None, source_dict: dict, results: list, de
assert isinstance(h, str)
results.append(h)
return h
except:
except Exception:
results.append(gr.update())
return None
@@ -89,7 +89,7 @@ def get_list(key: str, fallback: str | None, source_dict: dict, results: list, d
h = eval(h)
assert isinstance(h, list)
results.append(h)
except:
except Exception:
results.append(gr.update())
@@ -99,7 +99,7 @@ def get_number(key: str, fallback: str | None, source_dict: dict, results: list,
assert h is not None
h = cast_type(h)
results.append(h)
except:
except Exception:
results.append(gr.update())
@@ -110,7 +110,7 @@ def get_image_number(key: str, fallback: str | None, source_dict: dict, results:
h = int(h)
h = min(h, modules.config.default_max_image_number)
results.append(h)
except:
except Exception:
results.append(1)
@@ -126,7 +126,7 @@ def get_steps(key: str, fallback: str | None, source_dict: dict, results: list,
results.append(h)
return
results.append(-1)
except:
except Exception:
results.append(-1)
@@ -143,7 +143,7 @@ def get_resolution(key: str, fallback: str | None, source_dict: dict, results: l
results.append(gr.update())
results.append(int(width))
results.append(int(height))
except:
except Exception:
results.append(gr.update())
results.append(gr.update())
results.append(gr.update())
@@ -156,7 +156,7 @@ def get_seed(key: str, fallback: str | None, source_dict: dict, results: list, d
h = int(h)
results.append(False)
results.append(h)
except:
except Exception:
results.append(gr.update())
results.append(gr.update())
@@ -171,7 +171,7 @@ def get_inpaint_engine_version(key: str, fallback: str | None, source_dict: dict
results.append(gr.update())
results.append(h)
return h
except:
except Exception:
results.append(gr.update())
results.append('empty')
return None
@@ -185,7 +185,7 @@ def get_inpaint_method(key: str, fallback: str | None, source_dict: dict, result
for i in range(modules.config.default_enhance_tabs):
results.append(h)
return h
except:
except Exception:
results.append(gr.update())
for i in range(modules.config.default_enhance_tabs):
results.append(gr.update())
@@ -198,7 +198,7 @@ def get_adm_guidance(key: str, fallback: str | None, source_dict: dict, results:
results.append(float(p))
results.append(float(n))
results.append(float(e))
except:
except Exception:
results.append(gr.update())
results.append(gr.update())
results.append(gr.update())
@@ -213,7 +213,7 @@ def get_freeu(key: str, fallback: str | None, source_dict: dict, results: list,
results.append(float(b2))
results.append(float(s1))
results.append(float(s2))
except:
except Exception:
results.append(False)
results.append(gr.update())
results.append(gr.update())
@@ -240,7 +240,7 @@ def get_lora(key: str, fallback: str | None, source_dict: dict, results: list, p
results.append(enabled)
results.append(name)
results.append(weight)
except:
except Exception:
results.append(True)
results.append('None')
results.append(1)

View File

@@ -482,7 +482,7 @@ def apply_wildcards(wildcard_text, rng, i, read_wildcards_in_order) -> str:
wildcard_text = wildcard_text.replace(f'__{placeholder}__', words[i % len(words)], 1)
else:
wildcard_text = wildcard_text.replace(f'__{placeholder}__', rng.choice(words), 1)
except:
except Exception:
print(f'[Wildcards] Warning: {placeholder}.txt missing or empty. '
f'Using "{placeholder}" as a normal word.')
wildcard_text = wildcard_text.replace(f'__{placeholder}__', placeholder)