From 07ea52542e3cce692ae44d8a22bd77bd66318e53 Mon Sep 17 00:00:00 2001
From: lllyasviel
Date: Wed, 13 Sep 2023 02:39:12 -0700
Subject: [PATCH] try fix lora vram again (#358)

---
 fooocus_version.py | 2 +-
 modules/patch.py   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/fooocus_version.py b/fooocus_version.py
index b61970de..8e95544e 100644
--- a/fooocus_version.py
+++ b/fooocus_version.py
@@ -1 +1 @@
-version = '2.0.1'
+version = '2.0.2'

diff --git a/modules/patch.py b/modules/patch.py
index a4d84297..08474956 100644
--- a/modules/patch.py
+++ b/modules/patch.py
@@ -74,6 +74,6 @@ def text_encoder_device_patched():


 def patch_all():
-    # comfy.model_management.text_encoder_device = text_encoder_device_patched
+    comfy.model_management.text_encoder_device = text_encoder_device_patched
     comfy.k_diffusion.external.DiscreteEpsDDPMDenoiser.forward = patched_discrete_eps_ddpm_denoiser_forward
     comfy.model_base.SDXL.encode_adm = sdxl_encode_adm_patched