From 12f65d800d67bc0e51c2cdc9fb5e40165adebaf1 Mon Sep 17 00:00:00 2001
From: Billy
Date: Thu, 19 Jun 2025 09:40:58 +1000
Subject: [PATCH] Formatting

---
 .../backend/model_manager/load/model_loaders/lora.py |  4 ++--
 invokeai/backend/model_manager/omi.py                | 11 ++++++-----
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/invokeai/backend/model_manager/load/model_loaders/lora.py b/invokeai/backend/model_manager/load/model_loaders/lora.py
index dee1717709..4266256a8e 100644
--- a/invokeai/backend/model_manager/load/model_loaders/lora.py
+++ b/invokeai/backend/model_manager/load/model_loaders/lora.py
@@ -39,11 +39,11 @@ from invokeai.backend.patches.lora_conversions.flux_onetrainer_lora_conversion_u
 from invokeai.backend.patches.lora_conversions.sd_lora_conversion_utils import lora_model_from_sd_state_dict
 from invokeai.backend.patches.lora_conversions.sdxl_lora_conversion_utils import convert_sdxl_keys_to_diffusers_format
 
+
 @ModelLoaderRegistry.register(base=BaseModelType.Flux, type=ModelType.LoRA, format=ModelFormat.OMI)
 @ModelLoaderRegistry.register(base=BaseModelType.StableDiffusion1, type=ModelType.LoRA, format=ModelFormat.OMI)
 @ModelLoaderRegistry.register(base=BaseModelType.StableDiffusion3, type=ModelType.LoRA, format=ModelFormat.OMI)
 @ModelLoaderRegistry.register(base=BaseModelType.StableDiffusionXL, type=ModelType.LoRA, format=ModelFormat.OMI)
-
 @ModelLoaderRegistry.register(base=BaseModelType.Any, type=ModelType.LoRA, format=ModelFormat.Diffusers)
 @ModelLoaderRegistry.register(base=BaseModelType.Any, type=ModelType.LoRA, format=ModelFormat.LyCORIS)
 @ModelLoaderRegistry.register(base=BaseModelType.Flux, type=ModelType.ControlLoRa, format=ModelFormat.LyCORIS)
@@ -79,7 +79,7 @@ class LoRALoader(ModelLoader):
             state_dict = torch.load(model_path, map_location="cpu")
 
         if config.format == ModelFormat.OMI:
-            state_dict = convert_to_omi(state_dict. config.base)  # type: ignore
+            state_dict = convert_to_omi(state_dict, config.base)  # type: ignore
 
         # Apply state_dict key conversions, if necessary.
         if self._model_base == BaseModelType.StableDiffusionXL:
diff --git a/invokeai/backend/model_manager/omi.py b/invokeai/backend/model_manager/omi.py
index 477294c6ec..5dd595899f 100644
--- a/invokeai/backend/model_manager/omi.py
+++ b/invokeai/backend/model_manager/omi.py
@@ -1,10 +1,11 @@
+import omi_model_standards.convert.lora.convert_lora_util as lora_util
+from omi_model_standards.convert.lora.convert_flux_lora import convert_flux_lora_key_sets
+from omi_model_standards.convert.lora.convert_sd3_lora import convert_sd3_lora_key_sets
+from omi_model_standards.convert.lora.convert_sd_lora import convert_sd_lora_key_sets
+from omi_model_standards.convert.lora.convert_sdxl_lora import convert_sdxl_lora_key_sets
+
 from invokeai.backend.model_manager.model_on_disk import StateDict
 from invokeai.backend.model_manager.taxonomy import BaseModelType
-from omi_model_standards.convert.lora.convert_sdxl_lora import convert_sdxl_lora_key_sets
-from omi_model_standards.convert.lora.convert_flux_lora import convert_flux_lora_key_sets
-from omi_model_standards.convert.lora.convert_sd_lora import convert_sd_lora_key_sets
-from omi_model_standards.convert.lora.convert_sd3_lora import convert_sd3_lora_key_sets
-import omi_model_standards.convert.lora.convert_lora_util as lora_util
 
 
 def convert_to_omi(weights_sd: StateDict, base: BaseModelType):
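
Note on the omi.py hunk: only the reordered imports and the convert_to_omi signature are visible in this patch. The sketch below shows how the key-set imports and the corrected call site in lora.py presumably fit together; the dictionary dispatch on BaseModelType and the lora_util entry point are assumptions for illustration, not code taken from this diff.

```python
# Hedged sketch only -- not the implementation in this patch. The imports and
# the convert_to_omi signature come from omi.py above; the dispatch table and
# the lora_util call are assumptions.
import omi_model_standards.convert.lora.convert_lora_util as lora_util
from omi_model_standards.convert.lora.convert_flux_lora import convert_flux_lora_key_sets
from omi_model_standards.convert.lora.convert_sd3_lora import convert_sd3_lora_key_sets
from omi_model_standards.convert.lora.convert_sd_lora import convert_sd_lora_key_sets
from omi_model_standards.convert.lora.convert_sdxl_lora import convert_sdxl_lora_key_sets

from invokeai.backend.model_manager.model_on_disk import StateDict
from invokeai.backend.model_manager.taxonomy import BaseModelType


def convert_to_omi(weights_sd: StateDict, base: BaseModelType):
    # Map the base model family to the key set describing its OMI layout.
    # These four bases match the OMI registrations added in lora.py.
    key_sets = {
        BaseModelType.Flux: convert_flux_lora_key_sets(),
        BaseModelType.StableDiffusion1: convert_sd_lora_key_sets(),
        BaseModelType.StableDiffusion3: convert_sd3_lora_key_sets(),
        BaseModelType.StableDiffusionXL: convert_sdxl_lora_key_sets(),
    }[base]
    # Hypothetical helper name: lora_util is assumed to expose a state-dict
    # conversion routine that applies the selected key set.
    return lora_util.convert_lora_state_dict(weights_sd, key_sets)
```

Because convert_to_omi takes the state dict and the base model type as two separate arguments, the call site in lora.py must pass them with a comma, as in the corrected hunk: state_dict = convert_to_omi(state_dict, config.base).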