From 3cc95314d6773756af3ac7f3822a646f5368e165 Mon Sep 17 00:00:00 2001
From: lllyasviel
Date: Sat, 21 Oct 2023 17:12:42 -0700
Subject: [PATCH] Fooocus GitHub Bot Commit

This commit is generated by a GitHub bot of Fooocus
---
 .../headless/fcbh/ldm/modules/attention.py   | 12 +--
 .../headless/fcbh_extras/nodes_hypertile.py  | 83 +++++++++++++++++++
 backend/headless/nodes.py                    |  3 +-
 fooocus_version.py                           |  2 +-
 4 files changed, 93 insertions(+), 7 deletions(-)
 create mode 100644 backend/headless/fcbh_extras/nodes_hypertile.py

diff --git a/backend/headless/fcbh/ldm/modules/attention.py b/backend/headless/fcbh/ldm/modules/attention.py
index d00a9af4..1a3b9f02 100644
--- a/backend/headless/fcbh/ldm/modules/attention.py
+++ b/backend/headless/fcbh/ldm/modules/attention.py
@@ -253,12 +253,14 @@ def attention_split(q, k, v, heads, mask=None):
     return r2
 
 def attention_xformers(q, k, v, heads, mask=None):
-    b, _, _ = q.shape
+    b, _, dim_head = q.shape
+    dim_head //= heads
+
     q, k, v = map(
         lambda t: t.unsqueeze(3)
-        .reshape(b, t.shape[1], heads, -1)
+        .reshape(b, -1, heads, dim_head)
         .permute(0, 2, 1, 3)
-        .reshape(b * heads, t.shape[1], -1)
+        .reshape(b * heads, -1, dim_head)
         .contiguous(),
         (q, k, v),
     )
@@ -270,9 +272,9 @@ def attention_xformers(q, k, v, heads, mask=None):
         raise NotImplementedError
     out = (
         out.unsqueeze(0)
-        .reshape(b, heads, out.shape[1], -1)
+        .reshape(b, heads, -1, dim_head)
         .permute(0, 2, 1, 3)
-        .reshape(b, out.shape[1], -1)
+        .reshape(b, -1, heads * dim_head)
     )
     return out
 
diff --git a/backend/headless/fcbh_extras/nodes_hypertile.py b/backend/headless/fcbh_extras/nodes_hypertile.py
new file mode 100644
index 00000000..0d7d4c95
--- /dev/null
+++ b/backend/headless/fcbh_extras/nodes_hypertile.py
@@ -0,0 +1,83 @@
+#Taken from: https://github.com/tfernd/HyperTile/
+
+import math
+from einops import rearrange
+import random
+
+def random_divisor(value: int, min_value: int, /, max_options: int = 1, counter = 0) -> int:
+    min_value = min(min_value, value)
+
+    # All big divisors of value (inclusive)
+    divisors = [i for i in range(min_value, value + 1) if value % i == 0]
+
+    ns = [value // i for i in divisors[:max_options]]  # has at least 1 element
+
+    random.seed(counter)
+    idx = random.randint(0, len(ns) - 1)
+
+    return ns[idx]
+
+class HyperTile:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "model": ("MODEL",),
+                              "tile_size": ("INT", {"default": 256, "min": 1, "max": 2048}),
+                              "swap_size": ("INT", {"default": 2, "min": 1, "max": 128}),
+                              "max_depth": ("INT", {"default": 0, "min": 0, "max": 10}),
+                              "scale_depth": ("BOOLEAN", {"default": False}),
+                              }}
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "patch"
+
+    CATEGORY = "_for_testing"
+
+    def patch(self, model, tile_size, swap_size, max_depth, scale_depth):
+        model_channels = model.model.model_config.unet_config["model_channels"]
+
+        apply_to = set()
+        temp = model_channels
+        for x in range(max_depth + 1):
+            apply_to.add(temp)
+            temp *= 2
+
+        latent_tile_size = max(32, tile_size) // 8
+        self.temp = None
+        self.counter = 1
+
+        def hypertile_in(q, k, v, extra_options):
+            if q.shape[-1] in apply_to:
+                shape = extra_options["original_shape"]
+                aspect_ratio = shape[-1] / shape[-2]
+
+                hw = q.size(1)
+                h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio))
+
+                factor = 2**((q.shape[-1] // model_channels) - 1) if scale_depth else 1
+                nh = random_divisor(h, latent_tile_size * factor, swap_size, self.counter)
+                self.counter += 1
+                nw = random_divisor(w, latent_tile_size * factor, swap_size, self.counter)
+                self.counter += 1
+
+                if nh * nw > 1:
+                    q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw)
+                    self.temp = (nh, nw, h, w)
+                return q, k, v
+
+            return q, k, v
+        def hypertile_out(out, extra_options):
+            if self.temp is not None:
+                nh, nw, h, w = self.temp
+                self.temp = None
+                out = rearrange(out, "(b nh nw) hw c -> b nh nw hw c", nh=nh, nw=nw)
+                out = rearrange(out, "b nh nw (h w) c -> b (nh h nw w) c", h=h // nh, w=w // nw)
+            return out
+
+
+        m = model.clone()
+        m.set_model_attn1_patch(hypertile_in)
+        m.set_model_attn1_output_patch(hypertile_out)
+        return (m, )
+
+NODE_CLASS_MAPPINGS = {
+    "HyperTile": HyperTile,
+}
diff --git a/backend/headless/nodes.py b/backend/headless/nodes.py
index 2c1a25f0..b57cd823 100644
--- a/backend/headless/nodes.py
+++ b/backend/headless/nodes.py
@@ -1796,7 +1796,8 @@ def init_custom_nodes():
         "nodes_clip_sdxl.py",
         "nodes_canny.py",
         "nodes_freelunch.py",
-        "nodes_custom_sampler.py"
+        "nodes_custom_sampler.py",
+        "nodes_hypertile.py",
     ]
 
     for node_file in extras_files:
diff --git a/fooocus_version.py b/fooocus_version.py
index c264cd84..e8adf2ec 100644
--- a/fooocus_version.py
+++ b/fooocus_version.py
@@ -1 +1 @@
-version = '2.1.723'
+version = '2.1.724'
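
For reference, a minimal standalone sketch of the tiling scheme the new node implements: random_divisor (copied from the patch) picks how many tiles to cut each latent axis into, and the einops rearranges in hypertile_in/hypertile_out fold those tiles into the batch axis and exactly restore them afterwards. The latent size (128x192), channel count (320), and counter values below are illustrative assumptions, not values taken from Fooocus.

# Standalone sketch of the HyperTile tiling round-trip (illustrative sizes).
import random

import torch
from einops import rearrange

def random_divisor(value: int, min_value: int, /, max_options: int = 1, counter=0) -> int:
    # Copied from the patch: consider the smallest divisors of `value` that are
    # at least `min_value` (i.e. the largest tile counts whose tiles stay big
    # enough) and pick one, seeded by `counter` so the split varies per step.
    min_value = min(min_value, value)
    divisors = [i for i in range(min_value, value + 1) if value % i == 0]
    ns = [value // i for i in divisors[:max_options]]
    random.seed(counter)
    return ns[random.randint(0, len(ns) - 1)]

# Assumed latent grid, e.g. a 1024x1536 image -> 128x192 latent.
b, c = 1, 320
h, w = 128, 192
latent_tile_size = max(32, 256) // 8  # 32, matching the node's default tile_size=256

nh = random_divisor(h, latent_tile_size, 2, counter=1)  # candidates ns = [4, 2]
nw = random_divisor(w, latent_tile_size, 2, counter=2)  # candidates ns = [6, 4]

q = torch.randn(b, h * w, c)  # tokens flattened row-major, as in attn1

# hypertile_in: split the token grid into nh*nw tiles and fold them into the
# batch axis, so self-attention runs per tile instead of over the full image.
tiled = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c",
                  h=h // nh, w=w // nw, nh=nh, nw=nw)
print(tiled.shape)  # e.g. torch.Size([8, 3072, 320]) when nh=2, nw=4

# hypertile_out (done there in two rearranges): undo the tiling exactly.
restored = rearrange(tiled, "(b nh nw) (h w) c -> b (nh h nw w) c",
                     h=h // nh, w=w // nw, nh=nh, nw=nw)
assert torch.equal(q, restored)

This round trip is also why the accompanying attention_xformers change matters: with tiles folded into the batch dimension, the output reshape can no longer infer sizes from a fixed sequence length, so the patch derives dim_head from heads up front and uses heads * dim_head explicitly when merging heads back.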