Add XLabs FLUX controlnet state dict key file to be used for development/testing.

This commit is contained in:
Ryan Dick
2024-09-30 18:20:44 +00:00
parent 8544ba3798
commit 2cc72b19bc

View File

@@ -0,0 +1,91 @@
# State dict keys for an XLabs FLUX ControlNet model. Intended to be used for unit tests.
# These keys were extracted from:
# https://huggingface.co/XLabs-AI/flux-controlnet-collections/blob/86ab1e915a389d5857135c00e0d350e9e38a9048/flux-canny-controlnet_v2.safetensors


def _build_state_dict_keys() -> list[str]:
    """Reconstruct the extracted key list from its repeating structural patterns.

    Returns the same 86 keys as the original flat listing, in the same
    lexicographic (string-sorted) order.
    """
    params = ("bias", "weight")
    keys: list[str] = []

    # controlnet_blocks.{0,1}.{bias,weight}
    for idx in range(2):
        keys.extend(f"controlnet_blocks.{idx}.{p}" for p in params)

    # double_blocks.{0,1}.{img,txt}_* — each stream has attn norm scales plus
    # bias/weight pairs for attn.proj, attn.qkv, mlp.0, mlp.2, and mod.lin.
    for idx in range(2):
        for stream in ("img", "txt"):
            prefix = f"double_blocks.{idx}.{stream}"
            keys.append(f"{prefix}_attn.norm.key_norm.scale")
            keys.append(f"{prefix}_attn.norm.query_norm.scale")
            for sub in ("_attn.proj", "_attn.qkv", "_mlp.0", "_mlp.2", "_mod.lin"):
                keys.extend(f"{prefix}{sub}.{p}" for p in params)

    # Modules with in_layer/out_layer bias+weight pairs.
    for module in ("guidance_in", "time_in", "vector_in"):
        for layer in ("in_layer", "out_layer"):
            keys.extend(f"{module}.{layer}.{p}" for p in params)

    # Modules with a single flat bias+weight pair.
    for module in ("img_in", "pos_embed_input", "txt_in"):
        keys.extend(f"{module}.{p}" for p in params)

    # input_hint_block.{0,2,4,...,14}.{bias,weight} — even indices only.
    for idx in range(0, 16, 2):
        keys.extend(f"input_hint_block.{idx}.{p}" for p in params)

    # The original extracted listing is in plain string-sorted order
    # (e.g. "input_hint_block.10.*" precedes "input_hint_block.2.*").
    return sorted(keys)


state_dict_keys = _build_state_dict_keys()