Spaces: Runtime error
superlabs-jinwoo committed on
Commit · 50f828f
1 Parent(s): 649346a
Init project
Browse files
- .gitignore +1 -0
- app.py +109 -0
- hidiffusion/__init__.py +3 -0
- hidiffusion/hidiffusion.py +0 -0
- hidiffusion/sd_module_key/sd15_module_key.txt +685 -0
- hidiffusion/sd_module_key/sdxl_module_key.txt +0 -0
- hidiffusion/utils.py +31 -0
- requirements.txt +7 -0
.gitignore
ADDED
@@ -0,0 +1 @@
__pycache__
app.py
ADDED
@@ -0,0 +1,109 @@
"""HiDiffusion demo for sd1.5 and sdxl."""
from functools import lru_cache

import gradio as gr
import PIL
import torch
from diffusers import DDIMScheduler, DiffusionPipeline, StableDiffusionPipeline, StableDiffusionXLPipeline

from hidiffusion import apply_hidiffusion

pretrained_models = {
    "sd1.5": "runwayml/stable-diffusion-v1-5",
    "sdxl": "stabilityai/stable-diffusion-xl-base-1.0",
}

pipeline_types = {
    "sd1.5": StableDiffusionPipeline,
    "sdxl": StableDiffusionXLPipeline,
}

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


@lru_cache
def load_pipeline(model_type: str) -> DiffusionPipeline:
    """Load a pretrained model"""
    pretrained_model, pipeline_cls = pretrained_models[model_type], pipeline_types[model_type]
    scheduler = DDIMScheduler.from_pretrained(pretrained_model, subfolder="scheduler")
    pipe = pipeline_cls.from_pretrained(
        pretrained_model, scheduler=scheduler, torch_dtype=torch.float16, variant="fp16"
    ).to(device)
    pipe.enable_xformers_memory_efficient_attention()
    pipe.enable_vae_tiling()
    return pipe


def generate(
    model_type: str, use_hidiffusion: bool, positive: str, negative: str, width: int, height: int, guidance_scale: float
) -> PIL.Image.Image:
    pipe = load_pipeline(model_type)
    print(f"{model_type} pipeline is loaded")
    if use_hidiffusion:
        apply_hidiffusion(pipe)
        print("hidiffusion is applied")
    image = pipe(
        positive, negative_prompt=negative, guidance_scale=guidance_scale, height=height, width=width, eta=1.0
    ).images[0]
    print("generation is done")
    return image


demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Radio(choices=["sd1.5", "sdxl"], label="Model Type", value="sd1.5"),
        gr.Checkbox(value=True, label="Use HiDiffusion"),
        gr.Textbox(label="Positive Prompt"),
        gr.Textbox(label="Negative Prompt"),
        gr.Slider(512, 4096, value=1024, step=1, label="width"),
        gr.Slider(512, 4096, value=1024, step=1, label="height"),
        gr.Slider(0.0, 20.0, value=7.5, step=0.1, label="Guidance Scale"),
    ],
    outputs=gr.Image(),
    allow_flagging="never",
    title="HiDiffusion Demo",
    description="""
    HiDiffusion is a training-free method that increases the resolution and speed of pretrained diffusion models.\n
    It is designed as a plug-and-play implementation. It can be integrated into diffusion pipelines by only adding a single line of code!\n
    More information: https://github.com/megvii-research/HiDiffusion
    """,
    examples=[
        [
            "sd1.5",
            True,
            # positive
            "thick strokes, bright colors, an exotic fox, cute, chibi kawaii,"
            "detailed fur, hyperdetailed , big reflective eyes, fairytale, artstation,"
            "centered composition, perfect composition, centered, vibrant colors, muted colors, high detailed, 8k.",
            # negative
            "ugly, tiling, poorly drawn face, out of frame, disfigured, deformed, blurry, bad anatomy, blurred",
            # width
            1024,
            # height
            1024,
            # guidance scale
            7.5,
        ],
        [
            "sdxl",
            True,
            # positive
            "thick strokes, bright colors, an exotic fox, cute, chibi kawaii,"
            "detailed fur, hyperdetailed , big reflective eyes, fairytale, artstation,"
            "centered composition, perfect composition, centered, vibrant colors, muted colors, high detailed, 8k.",
            # negative
            "blurry, ugly, duplicate, poorly drawn, deformed, mosaic",
            # width
            2048,
            # height
            2048,
            # guidance scale
            7.5,
        ],
    ],
)


if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0")
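A minimal standalone sketch (not part of this commit) of the "single line of code" integration the demo description refers to, assuming the same SDXL checkpoint, DDIM scheduler, and eta=1.0 settings that app.py uses; the prompt and output filename are placeholders.

import torch
from diffusers import DDIMScheduler, StableDiffusionXLPipeline
from hidiffusion import apply_hidiffusion

scheduler = DDIMScheduler.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", subfolder="scheduler"
)
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    scheduler=scheduler,
    torch_dtype=torch.float16,
    variant="fp16",
).to("cuda")

apply_hidiffusion(pipe)  # the single-line HiDiffusion integration

image = pipe("an exotic fox, fairytale, 8k", height=2048, width=2048, eta=1.0).images[0]
image.save("fox.png")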
hidiffusion/__init__.py
ADDED
@@ -0,0 +1,3 @@
from .hidiffusion import apply_hidiffusion, remove_hidiffusion

__all__ = ["apply_hidiffusion", "remove_hidiffusion"]
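A sketch (not part of this commit) of the apply/remove round trip these exports enable. Because app.py caches pipelines with lru_cache and never calls remove_hidiffusion, a cached pipeline stays patched once HiDiffusion has been applied; a wrapper like the one below would restore the original behaviour between calls. The exact semantics of remove_hidiffusion live in hidiffusion.py, whose diff is not rendered here, so this is an assumption.

from hidiffusion import apply_hidiffusion, remove_hidiffusion

def generate_once(pipe, prompt: str):
    """Apply HiDiffusion for a single call, then restore the original pipeline."""
    apply_hidiffusion(pipe)
    try:
        return pipe(prompt, eta=1.0).images[0]
    finally:
        remove_hidiffusion(pipe)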
hidiffusion/hidiffusion.py
ADDED
The diff for this file is too large to render.
See raw diff
hidiffusion/sd_module_key/sd15_module_key.txt
ADDED
@@ -0,0 +1,685 @@
conv_in
time_proj
time_embedding
time_embedding.linear_1
time_embedding.act
time_embedding.linear_2
down_blocks
down_blocks.0
down_blocks.0.attentions
down_blocks.0.attentions.0
down_blocks.0.attentions.0.norm
down_blocks.0.attentions.0.proj_in
down_blocks.0.attentions.0.transformer_blocks
down_blocks.0.attentions.0.transformer_blocks.0
down_blocks.0.attentions.0.transformer_blocks.0.norm1
down_blocks.0.attentions.0.transformer_blocks.0.attn1
down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q
down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_k
down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v
down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out
down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out.0
down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out.1
down_blocks.0.attentions.0.transformer_blocks.0.norm2
down_blocks.0.attentions.0.transformer_blocks.0.attn2
down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q
down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_k
down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v
down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out
down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out.0
down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out.1
down_blocks.0.attentions.0.transformer_blocks.0.norm3
down_blocks.0.attentions.0.transformer_blocks.0.ff
down_blocks.0.attentions.0.transformer_blocks.0.ff.net
down_blocks.0.attentions.0.transformer_blocks.0.ff.net.0
down_blocks.0.attentions.0.transformer_blocks.0.ff.net.0.proj
down_blocks.0.attentions.0.transformer_blocks.0.ff.net.1
down_blocks.0.attentions.0.transformer_blocks.0.ff.net.2
down_blocks.0.attentions.0.proj_out
down_blocks.0.attentions.1
down_blocks.0.attentions.1.norm
down_blocks.0.attentions.1.proj_in
down_blocks.0.attentions.1.transformer_blocks
down_blocks.0.attentions.1.transformer_blocks.0
down_blocks.0.attentions.1.transformer_blocks.0.norm1
down_blocks.0.attentions.1.transformer_blocks.0.attn1
down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q
down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_k
down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v
down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out
down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out.0
down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out.1
down_blocks.0.attentions.1.transformer_blocks.0.norm2
down_blocks.0.attentions.1.transformer_blocks.0.attn2
down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q
down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_k
down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v
down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out
down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out.0
down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out.1
down_blocks.0.attentions.1.transformer_blocks.0.norm3
down_blocks.0.attentions.1.transformer_blocks.0.ff
down_blocks.0.attentions.1.transformer_blocks.0.ff.net
down_blocks.0.attentions.1.transformer_blocks.0.ff.net.0
down_blocks.0.attentions.1.transformer_blocks.0.ff.net.0.proj
down_blocks.0.attentions.1.transformer_blocks.0.ff.net.1
down_blocks.0.attentions.1.transformer_blocks.0.ff.net.2
down_blocks.0.attentions.1.proj_out
down_blocks.0.resnets
down_blocks.0.resnets.0
down_blocks.0.resnets.0.norm1
down_blocks.0.resnets.0.conv1
down_blocks.0.resnets.0.time_emb_proj
down_blocks.0.resnets.0.norm2
down_blocks.0.resnets.0.dropout
down_blocks.0.resnets.0.conv2
down_blocks.0.resnets.1
down_blocks.0.resnets.1.norm1
down_blocks.0.resnets.1.conv1
down_blocks.0.resnets.1.time_emb_proj
down_blocks.0.resnets.1.norm2
down_blocks.0.resnets.1.dropout
down_blocks.0.resnets.1.conv2
down_blocks.0.downsamplers
down_blocks.0.downsamplers.0
down_blocks.0.downsamplers.0.conv
down_blocks.1
down_blocks.1.attentions
down_blocks.1.attentions.0
down_blocks.1.attentions.0.norm
down_blocks.1.attentions.0.proj_in
down_blocks.1.attentions.0.transformer_blocks
down_blocks.1.attentions.0.transformer_blocks.0
down_blocks.1.attentions.0.transformer_blocks.0.norm1
down_blocks.1.attentions.0.transformer_blocks.0.attn1
down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q
down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k
down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v
down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out
down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0
down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.1
down_blocks.1.attentions.0.transformer_blocks.0.norm2
down_blocks.1.attentions.0.transformer_blocks.0.attn2
down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q
down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k
down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v
down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out
down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0
down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.1
down_blocks.1.attentions.0.transformer_blocks.0.norm3
down_blocks.1.attentions.0.transformer_blocks.0.ff
down_blocks.1.attentions.0.transformer_blocks.0.ff.net
down_blocks.1.attentions.0.transformer_blocks.0.ff.net.0
down_blocks.1.attentions.0.transformer_blocks.0.ff.net.0.proj
down_blocks.1.attentions.0.transformer_blocks.0.ff.net.1
down_blocks.1.attentions.0.transformer_blocks.0.ff.net.2
down_blocks.1.attentions.0.proj_out
down_blocks.1.attentions.1
down_blocks.1.attentions.1.norm
down_blocks.1.attentions.1.proj_in
down_blocks.1.attentions.1.transformer_blocks
down_blocks.1.attentions.1.transformer_blocks.0
down_blocks.1.attentions.1.transformer_blocks.0.norm1
down_blocks.1.attentions.1.transformer_blocks.0.attn1
down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q
down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k
down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v
down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out
down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0
down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.1
down_blocks.1.attentions.1.transformer_blocks.0.norm2
down_blocks.1.attentions.1.transformer_blocks.0.attn2
down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q
down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k
down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v
down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out
down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0
down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.1
down_blocks.1.attentions.1.transformer_blocks.0.norm3
down_blocks.1.attentions.1.transformer_blocks.0.ff
down_blocks.1.attentions.1.transformer_blocks.0.ff.net
down_blocks.1.attentions.1.transformer_blocks.0.ff.net.0
down_blocks.1.attentions.1.transformer_blocks.0.ff.net.0.proj
down_blocks.1.attentions.1.transformer_blocks.0.ff.net.1
down_blocks.1.attentions.1.transformer_blocks.0.ff.net.2
down_blocks.1.attentions.1.proj_out
down_blocks.1.resnets
down_blocks.1.resnets.0
down_blocks.1.resnets.0.norm1
down_blocks.1.resnets.0.conv1
down_blocks.1.resnets.0.time_emb_proj
down_blocks.1.resnets.0.norm2
down_blocks.1.resnets.0.dropout
down_blocks.1.resnets.0.conv2
down_blocks.1.resnets.0.conv_shortcut
down_blocks.1.resnets.1
down_blocks.1.resnets.1.norm1
down_blocks.1.resnets.1.conv1
down_blocks.1.resnets.1.time_emb_proj
down_blocks.1.resnets.1.norm2
down_blocks.1.resnets.1.dropout
down_blocks.1.resnets.1.conv2
down_blocks.1.downsamplers
down_blocks.1.downsamplers.0
down_blocks.1.downsamplers.0.conv
down_blocks.2
down_blocks.2.attentions
down_blocks.2.attentions.0
down_blocks.2.attentions.0.norm
down_blocks.2.attentions.0.proj_in
down_blocks.2.attentions.0.transformer_blocks
down_blocks.2.attentions.0.transformer_blocks.0
down_blocks.2.attentions.0.transformer_blocks.0.norm1
down_blocks.2.attentions.0.transformer_blocks.0.attn1
down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q
down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k
down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v
down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out
down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0
down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.1
down_blocks.2.attentions.0.transformer_blocks.0.norm2
down_blocks.2.attentions.0.transformer_blocks.0.attn2
down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q
down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k
down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v
down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out
down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0
down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.1
down_blocks.2.attentions.0.transformer_blocks.0.norm3
down_blocks.2.attentions.0.transformer_blocks.0.ff
down_blocks.2.attentions.0.transformer_blocks.0.ff.net
down_blocks.2.attentions.0.transformer_blocks.0.ff.net.0
down_blocks.2.attentions.0.transformer_blocks.0.ff.net.0.proj
down_blocks.2.attentions.0.transformer_blocks.0.ff.net.1
down_blocks.2.attentions.0.transformer_blocks.0.ff.net.2
down_blocks.2.attentions.0.proj_out
down_blocks.2.attentions.1
down_blocks.2.attentions.1.norm
down_blocks.2.attentions.1.proj_in
down_blocks.2.attentions.1.transformer_blocks
down_blocks.2.attentions.1.transformer_blocks.0
down_blocks.2.attentions.1.transformer_blocks.0.norm1
down_blocks.2.attentions.1.transformer_blocks.0.attn1
down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q
down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k
down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v
down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out
down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0
down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.1
down_blocks.2.attentions.1.transformer_blocks.0.norm2
down_blocks.2.attentions.1.transformer_blocks.0.attn2
down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q
down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k
down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v
down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out
down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0
down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.1
down_blocks.2.attentions.1.transformer_blocks.0.norm3
down_blocks.2.attentions.1.transformer_blocks.0.ff
down_blocks.2.attentions.1.transformer_blocks.0.ff.net
down_blocks.2.attentions.1.transformer_blocks.0.ff.net.0
down_blocks.2.attentions.1.transformer_blocks.0.ff.net.0.proj
down_blocks.2.attentions.1.transformer_blocks.0.ff.net.1
down_blocks.2.attentions.1.transformer_blocks.0.ff.net.2
down_blocks.2.attentions.1.proj_out
down_blocks.2.resnets
down_blocks.2.resnets.0
down_blocks.2.resnets.0.norm1
down_blocks.2.resnets.0.conv1
down_blocks.2.resnets.0.time_emb_proj
down_blocks.2.resnets.0.norm2
down_blocks.2.resnets.0.dropout
down_blocks.2.resnets.0.conv2
down_blocks.2.resnets.0.conv_shortcut
down_blocks.2.resnets.1
down_blocks.2.resnets.1.norm1
down_blocks.2.resnets.1.conv1
down_blocks.2.resnets.1.time_emb_proj
down_blocks.2.resnets.1.norm2
down_blocks.2.resnets.1.dropout
down_blocks.2.resnets.1.conv2
down_blocks.2.downsamplers
down_blocks.2.downsamplers.0
down_blocks.2.downsamplers.0.conv
down_blocks.3
down_blocks.3.resnets
down_blocks.3.resnets.0
down_blocks.3.resnets.0.norm1
down_blocks.3.resnets.0.conv1
down_blocks.3.resnets.0.time_emb_proj
down_blocks.3.resnets.0.norm2
down_blocks.3.resnets.0.dropout
down_blocks.3.resnets.0.conv2
down_blocks.3.resnets.1
down_blocks.3.resnets.1.norm1
down_blocks.3.resnets.1.conv1
down_blocks.3.resnets.1.time_emb_proj
down_blocks.3.resnets.1.norm2
down_blocks.3.resnets.1.dropout
down_blocks.3.resnets.1.conv2
up_blocks
up_blocks.0
up_blocks.0.resnets
up_blocks.0.resnets.0
up_blocks.0.resnets.0.norm1
up_blocks.0.resnets.0.conv1
up_blocks.0.resnets.0.time_emb_proj
up_blocks.0.resnets.0.norm2
up_blocks.0.resnets.0.dropout
up_blocks.0.resnets.0.conv2
up_blocks.0.resnets.0.conv_shortcut
up_blocks.0.resnets.1
up_blocks.0.resnets.1.norm1
up_blocks.0.resnets.1.conv1
up_blocks.0.resnets.1.time_emb_proj
up_blocks.0.resnets.1.norm2
up_blocks.0.resnets.1.dropout
up_blocks.0.resnets.1.conv2
up_blocks.0.resnets.1.conv_shortcut
up_blocks.0.resnets.2
up_blocks.0.resnets.2.norm1
up_blocks.0.resnets.2.conv1
up_blocks.0.resnets.2.time_emb_proj
up_blocks.0.resnets.2.norm2
up_blocks.0.resnets.2.dropout
up_blocks.0.resnets.2.conv2
up_blocks.0.resnets.2.conv_shortcut
up_blocks.0.upsamplers
up_blocks.0.upsamplers.0
up_blocks.0.upsamplers.0.conv
up_blocks.1
up_blocks.1.attentions
up_blocks.1.attentions.0
up_blocks.1.attentions.0.norm
up_blocks.1.attentions.0.proj_in
up_blocks.1.attentions.0.transformer_blocks
up_blocks.1.attentions.0.transformer_blocks.0
up_blocks.1.attentions.0.transformer_blocks.0.norm1
up_blocks.1.attentions.0.transformer_blocks.0.attn1
up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q
up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k
up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v
up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out
up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0
up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.1
up_blocks.1.attentions.0.transformer_blocks.0.norm2
up_blocks.1.attentions.0.transformer_blocks.0.attn2
up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q
up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k
up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v
up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out
up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0
up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.1
up_blocks.1.attentions.0.transformer_blocks.0.norm3
up_blocks.1.attentions.0.transformer_blocks.0.ff
up_blocks.1.attentions.0.transformer_blocks.0.ff.net
up_blocks.1.attentions.0.transformer_blocks.0.ff.net.0
up_blocks.1.attentions.0.transformer_blocks.0.ff.net.0.proj
up_blocks.1.attentions.0.transformer_blocks.0.ff.net.1
up_blocks.1.attentions.0.transformer_blocks.0.ff.net.2
up_blocks.1.attentions.0.proj_out
up_blocks.1.attentions.1
up_blocks.1.attentions.1.norm
up_blocks.1.attentions.1.proj_in
up_blocks.1.attentions.1.transformer_blocks
up_blocks.1.attentions.1.transformer_blocks.0
up_blocks.1.attentions.1.transformer_blocks.0.norm1
up_blocks.1.attentions.1.transformer_blocks.0.attn1
up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q
up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k
up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v
up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out
up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0
up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.1
up_blocks.1.attentions.1.transformer_blocks.0.norm2
up_blocks.1.attentions.1.transformer_blocks.0.attn2
up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q
up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k
up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v
up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out
up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0
up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.1
up_blocks.1.attentions.1.transformer_blocks.0.norm3
up_blocks.1.attentions.1.transformer_blocks.0.ff
up_blocks.1.attentions.1.transformer_blocks.0.ff.net
up_blocks.1.attentions.1.transformer_blocks.0.ff.net.0
up_blocks.1.attentions.1.transformer_blocks.0.ff.net.0.proj
up_blocks.1.attentions.1.transformer_blocks.0.ff.net.1
up_blocks.1.attentions.1.transformer_blocks.0.ff.net.2
up_blocks.1.attentions.1.proj_out
up_blocks.1.attentions.2
up_blocks.1.attentions.2.norm
up_blocks.1.attentions.2.proj_in
up_blocks.1.attentions.2.transformer_blocks
up_blocks.1.attentions.2.transformer_blocks.0
up_blocks.1.attentions.2.transformer_blocks.0.norm1
up_blocks.1.attentions.2.transformer_blocks.0.attn1
up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q
up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_k
up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v
up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out
up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out.0
up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out.1
up_blocks.1.attentions.2.transformer_blocks.0.norm2
up_blocks.1.attentions.2.transformer_blocks.0.attn2
up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q
up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_k
up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_v
up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_out
up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_out.0
up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_out.1
up_blocks.1.attentions.2.transformer_blocks.0.norm3
up_blocks.1.attentions.2.transformer_blocks.0.ff
up_blocks.1.attentions.2.transformer_blocks.0.ff.net
up_blocks.1.attentions.2.transformer_blocks.0.ff.net.0
up_blocks.1.attentions.2.transformer_blocks.0.ff.net.0.proj
up_blocks.1.attentions.2.transformer_blocks.0.ff.net.1
up_blocks.1.attentions.2.transformer_blocks.0.ff.net.2
up_blocks.1.attentions.2.proj_out
up_blocks.1.resnets
up_blocks.1.resnets.0
up_blocks.1.resnets.0.norm1
up_blocks.1.resnets.0.conv1
up_blocks.1.resnets.0.time_emb_proj
up_blocks.1.resnets.0.norm2
up_blocks.1.resnets.0.dropout
up_blocks.1.resnets.0.conv2
up_blocks.1.resnets.0.conv_shortcut
up_blocks.1.resnets.1
up_blocks.1.resnets.1.norm1
up_blocks.1.resnets.1.conv1
up_blocks.1.resnets.1.time_emb_proj
up_blocks.1.resnets.1.norm2
up_blocks.1.resnets.1.dropout
up_blocks.1.resnets.1.conv2
up_blocks.1.resnets.1.conv_shortcut
up_blocks.1.resnets.2
up_blocks.1.resnets.2.norm1
up_blocks.1.resnets.2.conv1
up_blocks.1.resnets.2.time_emb_proj
up_blocks.1.resnets.2.norm2
up_blocks.1.resnets.2.dropout
up_blocks.1.resnets.2.conv2
up_blocks.1.resnets.2.conv_shortcut
up_blocks.1.upsamplers
up_blocks.1.upsamplers.0
up_blocks.1.upsamplers.0.conv
up_blocks.2
up_blocks.2.attentions
up_blocks.2.attentions.0
up_blocks.2.attentions.0.norm
up_blocks.2.attentions.0.proj_in
up_blocks.2.attentions.0.transformer_blocks
up_blocks.2.attentions.0.transformer_blocks.0
up_blocks.2.attentions.0.transformer_blocks.0.norm1
up_blocks.2.attentions.0.transformer_blocks.0.attn1
up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q
up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k
up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v
up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out
up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0
up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.1
up_blocks.2.attentions.0.transformer_blocks.0.norm2
up_blocks.2.attentions.0.transformer_blocks.0.attn2
up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q
up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k
up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v
up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out
up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0
up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.1
up_blocks.2.attentions.0.transformer_blocks.0.norm3
up_blocks.2.attentions.0.transformer_blocks.0.ff
up_blocks.2.attentions.0.transformer_blocks.0.ff.net
up_blocks.2.attentions.0.transformer_blocks.0.ff.net.0
up_blocks.2.attentions.0.transformer_blocks.0.ff.net.0.proj
up_blocks.2.attentions.0.transformer_blocks.0.ff.net.1
up_blocks.2.attentions.0.transformer_blocks.0.ff.net.2
up_blocks.2.attentions.0.proj_out
up_blocks.2.attentions.1
up_blocks.2.attentions.1.norm
up_blocks.2.attentions.1.proj_in
up_blocks.2.attentions.1.transformer_blocks
up_blocks.2.attentions.1.transformer_blocks.0
up_blocks.2.attentions.1.transformer_blocks.0.norm1
up_blocks.2.attentions.1.transformer_blocks.0.attn1
up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q
up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k
up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v
up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out
up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0
up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.1
up_blocks.2.attentions.1.transformer_blocks.0.norm2
up_blocks.2.attentions.1.transformer_blocks.0.attn2
up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q
up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k
up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v
up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out
up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0
up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.1
up_blocks.2.attentions.1.transformer_blocks.0.norm3
up_blocks.2.attentions.1.transformer_blocks.0.ff
up_blocks.2.attentions.1.transformer_blocks.0.ff.net
up_blocks.2.attentions.1.transformer_blocks.0.ff.net.0
up_blocks.2.attentions.1.transformer_blocks.0.ff.net.0.proj
up_blocks.2.attentions.1.transformer_blocks.0.ff.net.1
up_blocks.2.attentions.1.transformer_blocks.0.ff.net.2
up_blocks.2.attentions.1.proj_out
up_blocks.2.attentions.2
up_blocks.2.attentions.2.norm
up_blocks.2.attentions.2.proj_in
up_blocks.2.attentions.2.transformer_blocks
up_blocks.2.attentions.2.transformer_blocks.0
up_blocks.2.attentions.2.transformer_blocks.0.norm1
up_blocks.2.attentions.2.transformer_blocks.0.attn1
up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q
up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_k
up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_v
up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out
up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out.0
up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out.1
up_blocks.2.attentions.2.transformer_blocks.0.norm2
up_blocks.2.attentions.2.transformer_blocks.0.attn2
up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q
up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_k
up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v
up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out
up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out.0
up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out.1
up_blocks.2.attentions.2.transformer_blocks.0.norm3
up_blocks.2.attentions.2.transformer_blocks.0.ff
up_blocks.2.attentions.2.transformer_blocks.0.ff.net
up_blocks.2.attentions.2.transformer_blocks.0.ff.net.0
up_blocks.2.attentions.2.transformer_blocks.0.ff.net.0.proj
up_blocks.2.attentions.2.transformer_blocks.0.ff.net.1
up_blocks.2.attentions.2.transformer_blocks.0.ff.net.2
up_blocks.2.attentions.2.proj_out
up_blocks.2.resnets
up_blocks.2.resnets.0
up_blocks.2.resnets.0.norm1
up_blocks.2.resnets.0.conv1
up_blocks.2.resnets.0.time_emb_proj
up_blocks.2.resnets.0.norm2
up_blocks.2.resnets.0.dropout
up_blocks.2.resnets.0.conv2
up_blocks.2.resnets.0.conv_shortcut
up_blocks.2.resnets.1
up_blocks.2.resnets.1.norm1
up_blocks.2.resnets.1.conv1
up_blocks.2.resnets.1.time_emb_proj
up_blocks.2.resnets.1.norm2
up_blocks.2.resnets.1.dropout
up_blocks.2.resnets.1.conv2
up_blocks.2.resnets.1.conv_shortcut
up_blocks.2.resnets.2
up_blocks.2.resnets.2.norm1
up_blocks.2.resnets.2.conv1
up_blocks.2.resnets.2.time_emb_proj
up_blocks.2.resnets.2.norm2
up_blocks.2.resnets.2.dropout
up_blocks.2.resnets.2.conv2
up_blocks.2.resnets.2.conv_shortcut
up_blocks.2.upsamplers
up_blocks.2.upsamplers.0
up_blocks.2.upsamplers.0.conv
up_blocks.3
up_blocks.3.attentions
up_blocks.3.attentions.0
up_blocks.3.attentions.0.norm
up_blocks.3.attentions.0.proj_in
up_blocks.3.attentions.0.transformer_blocks
up_blocks.3.attentions.0.transformer_blocks.0
up_blocks.3.attentions.0.transformer_blocks.0.norm1
up_blocks.3.attentions.0.transformer_blocks.0.attn1
up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q
up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_k
up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v
up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out
up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out.0
up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out.1
up_blocks.3.attentions.0.transformer_blocks.0.norm2
up_blocks.3.attentions.0.transformer_blocks.0.attn2
up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q
up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_k
up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v
up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_out
up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_out.0
up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_out.1
up_blocks.3.attentions.0.transformer_blocks.0.norm3
up_blocks.3.attentions.0.transformer_blocks.0.ff
up_blocks.3.attentions.0.transformer_blocks.0.ff.net
up_blocks.3.attentions.0.transformer_blocks.0.ff.net.0
up_blocks.3.attentions.0.transformer_blocks.0.ff.net.0.proj
up_blocks.3.attentions.0.transformer_blocks.0.ff.net.1
up_blocks.3.attentions.0.transformer_blocks.0.ff.net.2
up_blocks.3.attentions.0.proj_out
up_blocks.3.attentions.1
up_blocks.3.attentions.1.norm
up_blocks.3.attentions.1.proj_in
up_blocks.3.attentions.1.transformer_blocks
up_blocks.3.attentions.1.transformer_blocks.0
up_blocks.3.attentions.1.transformer_blocks.0.norm1
up_blocks.3.attentions.1.transformer_blocks.0.attn1
up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_q
up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_k
up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v
up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out
up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out.0
up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out.1
up_blocks.3.attentions.1.transformer_blocks.0.norm2
up_blocks.3.attentions.1.transformer_blocks.0.attn2
up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q
up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_k
up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_v
up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out
up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out.0
up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out.1
up_blocks.3.attentions.1.transformer_blocks.0.norm3
up_blocks.3.attentions.1.transformer_blocks.0.ff
up_blocks.3.attentions.1.transformer_blocks.0.ff.net
up_blocks.3.attentions.1.transformer_blocks.0.ff.net.0
up_blocks.3.attentions.1.transformer_blocks.0.ff.net.0.proj
up_blocks.3.attentions.1.transformer_blocks.0.ff.net.1
up_blocks.3.attentions.1.transformer_blocks.0.ff.net.2
up_blocks.3.attentions.1.proj_out
up_blocks.3.attentions.2
up_blocks.3.attentions.2.norm
up_blocks.3.attentions.2.proj_in
up_blocks.3.attentions.2.transformer_blocks
up_blocks.3.attentions.2.transformer_blocks.0
up_blocks.3.attentions.2.transformer_blocks.0.norm1
up_blocks.3.attentions.2.transformer_blocks.0.attn1
up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_q
up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_k
up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v
up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out
up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out.0
up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out.1
up_blocks.3.attentions.2.transformer_blocks.0.norm2
up_blocks.3.attentions.2.transformer_blocks.0.attn2
up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q
up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_k
up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v
up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out
up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out.0
up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out.1
up_blocks.3.attentions.2.transformer_blocks.0.norm3
up_blocks.3.attentions.2.transformer_blocks.0.ff
up_blocks.3.attentions.2.transformer_blocks.0.ff.net
up_blocks.3.attentions.2.transformer_blocks.0.ff.net.0
up_blocks.3.attentions.2.transformer_blocks.0.ff.net.0.proj
up_blocks.3.attentions.2.transformer_blocks.0.ff.net.1
up_blocks.3.attentions.2.transformer_blocks.0.ff.net.2
up_blocks.3.attentions.2.proj_out
up_blocks.3.resnets
up_blocks.3.resnets.0
up_blocks.3.resnets.0.norm1
up_blocks.3.resnets.0.conv1
up_blocks.3.resnets.0.time_emb_proj
up_blocks.3.resnets.0.norm2
up_blocks.3.resnets.0.dropout
up_blocks.3.resnets.0.conv2
up_blocks.3.resnets.0.conv_shortcut
up_blocks.3.resnets.1
up_blocks.3.resnets.1.norm1
up_blocks.3.resnets.1.conv1
up_blocks.3.resnets.1.time_emb_proj
up_blocks.3.resnets.1.norm2
up_blocks.3.resnets.1.dropout
up_blocks.3.resnets.1.conv2
up_blocks.3.resnets.1.conv_shortcut
up_blocks.3.resnets.2
up_blocks.3.resnets.2.norm1
up_blocks.3.resnets.2.conv1
up_blocks.3.resnets.2.time_emb_proj
up_blocks.3.resnets.2.norm2
up_blocks.3.resnets.2.dropout
up_blocks.3.resnets.2.conv2
up_blocks.3.resnets.2.conv_shortcut
mid_block
mid_block.attentions
mid_block.attentions.0
mid_block.attentions.0.norm
mid_block.attentions.0.proj_in
mid_block.attentions.0.transformer_blocks
mid_block.attentions.0.transformer_blocks.0
mid_block.attentions.0.transformer_blocks.0.norm1
mid_block.attentions.0.transformer_blocks.0.attn1
mid_block.attentions.0.transformer_blocks.0.attn1.to_q
mid_block.attentions.0.transformer_blocks.0.attn1.to_k
mid_block.attentions.0.transformer_blocks.0.attn1.to_v
mid_block.attentions.0.transformer_blocks.0.attn1.to_out
mid_block.attentions.0.transformer_blocks.0.attn1.to_out.0
mid_block.attentions.0.transformer_blocks.0.attn1.to_out.1
mid_block.attentions.0.transformer_blocks.0.norm2
mid_block.attentions.0.transformer_blocks.0.attn2
mid_block.attentions.0.transformer_blocks.0.attn2.to_q
mid_block.attentions.0.transformer_blocks.0.attn2.to_k
mid_block.attentions.0.transformer_blocks.0.attn2.to_v
mid_block.attentions.0.transformer_blocks.0.attn2.to_out
mid_block.attentions.0.transformer_blocks.0.attn2.to_out.0
mid_block.attentions.0.transformer_blocks.0.attn2.to_out.1
mid_block.attentions.0.transformer_blocks.0.norm3
mid_block.attentions.0.transformer_blocks.0.ff
mid_block.attentions.0.transformer_blocks.0.ff.net
mid_block.attentions.0.transformer_blocks.0.ff.net.0
mid_block.attentions.0.transformer_blocks.0.ff.net.0.proj
mid_block.attentions.0.transformer_blocks.0.ff.net.1
mid_block.attentions.0.transformer_blocks.0.ff.net.2
mid_block.attentions.0.proj_out
mid_block.resnets
mid_block.resnets.0
mid_block.resnets.0.norm1
mid_block.resnets.0.conv1
mid_block.resnets.0.time_emb_proj
mid_block.resnets.0.norm2
mid_block.resnets.0.dropout
mid_block.resnets.0.conv2
mid_block.resnets.1
mid_block.resnets.1.norm1
mid_block.resnets.1.conv1
mid_block.resnets.1.time_emb_proj
mid_block.resnets.1.norm2
mid_block.resnets.1.dropout
mid_block.resnets.1.conv2
conv_norm_out
conv_out
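A sketch (not part of this commit) of how a module-key list like the one above can be regenerated; it assumes the keys are simply the named submodules of the SD1.5 UNet, which matches the entries shown.

import torch
from diffusers import UNet2DConditionModel

# Load only the UNet of the SD1.5 checkpoint used by app.py.
unet = UNet2DConditionModel.from_pretrained(
    "runwayml/stable-diffusion-v1-5", subfolder="unet", torch_dtype=torch.float16
)

# Dump every named submodule, one key per line, mirroring sd15_module_key.txt.
with open("hidiffusion/sd_module_key/sd15_module_key.txt", "w") as f:
    for name, _ in unet.named_modules():
        if name:  # skip the unnamed root module
            f.write(name + "\n")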
hidiffusion/sd_module_key/sdxl_module_key.txt
ADDED
The diff for this file is too large to render.
See raw diff
hidiffusion/utils.py
ADDED
@@ -0,0 +1,31 @@
import torch


def isinstance_str(x: object, cls_name: str):
    """
    Checks whether x has any class *named* cls_name in its ancestry.
    Doesn't require access to the class's implementation.

    Useful for patching!
    """

    for _cls in x.__class__.__mro__:
        if _cls.__name__ == cls_name:
            return True

    return False


def init_generator(device: torch.device, fallback: torch.Generator = None):
    """
    Forks the current default random generator given device.
    """
    if device.type == "cpu":
        return torch.Generator(device="cpu").set_state(torch.get_rng_state())
    elif device.type == "cuda":
        return torch.Generator(device=device).set_state(torch.cuda.get_rng_state())
    else:
        if fallback is None:
            return init_generator(torch.device("cpu"))
        else:
            return fallback
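A sketch (not part of this commit) of how these helpers are typically used when patching a pipeline: isinstance_str matches modules by class name without importing diffusers internals, and init_generator forks the current RNG state for reproducible sampling. The BasicTransformerBlock match below is an assumption about how hidiffusion.py applies them, since that file's diff is not rendered here.

import torch
from diffusers import StableDiffusionPipeline
from hidiffusion.utils import isinstance_str, init_generator

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# Collect transformer blocks by class *name*, without importing the class itself.
blocks = [m for m in pipe.unet.modules() if isinstance_str(m, "BasicTransformerBlock")]
print(f"found {len(blocks)} transformer blocks to patch")

# Fork the current RNG state on the UNet's device for reproducible sampling.
generator = init_generator(pipe.unet.device)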
requirements.txt
ADDED
@@ -0,0 +1,7 @@
torch==1.13.1
diffusers==0.27.0
transformers==4.27.4
accelerate==0.18.0
xformers==0.0.16rc425
triton==2.3.0
gradio==4.27.0