From fbc14c61eaee54ab327daed7b4b6ef7a3b95ac01 Mon Sep 17 00:00:00 2001
From: Billy
Date: Tue, 24 Jun 2025 06:53:33 +1000
Subject: [PATCH] Remove bundle_emb filter

---
 .../patches/lora_conversions/sdxl_lora_conversion_utils.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/invokeai/backend/patches/lora_conversions/sdxl_lora_conversion_utils.py b/invokeai/backend/patches/lora_conversions/sdxl_lora_conversion_utils.py
index 264fa2ebb8..bee9fb8d5e 100644
--- a/invokeai/backend/patches/lora_conversions/sdxl_lora_conversion_utils.py
+++ b/invokeai/backend/patches/lora_conversions/sdxl_lora_conversion_utils.py
@@ -22,7 +22,7 @@ def convert_sdxl_keys_to_diffusers_format(state_dict: Dict[str, T]) -> dict[str,
         ValueError: If state_dict contains an unrecognized key, or not all keys could be converted.
 
     Returns:
-        Dict[str, Tensor]: The diffusers-format state_dict with bundle_emb keys removed.
+        Dict[str, Tensor]: The diffusers-format state_dict.
     """
     converted_count = 0  # The number of Stability AI keys converted to diffusers format.
     not_converted_count = 0  # The number of keys that were not converted.
@@ -35,10 +35,7 @@ def convert_sdxl_keys_to_diffusers_format(state_dict: Dict[str, T]) -> dict[str,
 
     new_state_dict: dict[str, T] = {}
     for full_key, value in state_dict.items():
-        # Skip keys that start with "bundle_emb"
-        if full_key.startswith("bundle_emb"):
-            continue
-
+
         if full_key.startswith("lora_unet_"):
             search_key = full_key.replace("lora_unet_", "")
             # Use bisect to find the key in stability_unet_keys that *may* match the search_key's prefix.