Rename backend/patches/conversions/ to backend/patches/lora_conversions/

This commit is contained in:
Ryan Dick
2024-12-13 14:42:29 +00:00
parent 42f8d6aa11
commit 41664f88db
21 changed files with 37 additions and 31 deletions

View File

@@ -48,7 +48,7 @@ from invokeai.backend.flux.sampling_utils import (
 )
 from invokeai.backend.flux.text_conditioning import FluxTextConditioning
 from invokeai.backend.model_manager.config import ModelFormat
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.lora_model_raw import LoRAModelRaw
 from invokeai.backend.patches.lora_patcher import LoRAPatcher
 from invokeai.backend.stable_diffusion.diffusers_pipeline import PipelineIntermediateState

View File

@@ -18,7 +18,7 @@ from invokeai.app.invocations.primitives import FluxConditioningOutput
 from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.backend.flux.modules.conditioner import HFEncoder
 from invokeai.backend.model_manager.config import ModelFormat
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_CLIP_PREFIX
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_CLIP_PREFIX
 from invokeai.backend.patches.lora_model_raw import LoRAModelRaw
 from invokeai.backend.patches.lora_patcher import LoRAPatcher
 from invokeai.backend.stable_diffusion.diffusion.conditioning_data import ConditioningFieldData, FLUXConditioningInfo

View File

@@ -17,7 +17,7 @@ from invokeai.app.invocations.model import CLIPField, T5EncoderField
 from invokeai.app.invocations.primitives import SD3ConditioningOutput
 from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.backend.model_manager.config import ModelFormat
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_CLIP_PREFIX
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_CLIP_PREFIX
 from invokeai.backend.patches.lora_model_raw import LoRAModelRaw
 from invokeai.backend.patches.lora_patcher import LoRAPatcher
 from invokeai.backend.stable_diffusion.diffusion.conditioning_data import ConditioningFieldData, SD3ConditioningInfo

View File

@@ -20,19 +20,19 @@ from invokeai.backend.model_manager import (
 from invokeai.backend.model_manager.load.load_default import ModelLoader
 from invokeai.backend.model_manager.load.model_cache.model_cache_base import ModelCacheBase
 from invokeai.backend.model_manager.load.model_loader_registry import ModelLoaderRegistry
-from invokeai.backend.patches.conversions.flux_control_lora_utils import (
+from invokeai.backend.patches.lora_conversions.flux_control_lora_utils import (
     is_state_dict_likely_flux_control,
     lora_model_from_flux_control_state_dict,
 )
-from invokeai.backend.patches.conversions.flux_diffusers_lora_conversion_utils import (
+from invokeai.backend.patches.lora_conversions.flux_diffusers_lora_conversion_utils import (
     lora_model_from_flux_diffusers_state_dict,
 )
-from invokeai.backend.patches.conversions.flux_kohya_lora_conversion_utils import (
+from invokeai.backend.patches.lora_conversions.flux_kohya_lora_conversion_utils import (
     is_state_dict_likely_in_flux_kohya_format,
     lora_model_from_flux_kohya_state_dict,
 )
-from invokeai.backend.patches.conversions.sd_lora_conversion_utils import lora_model_from_sd_state_dict
-from invokeai.backend.patches.conversions.sdxl_lora_conversion_utils import convert_sdxl_keys_to_diffusers_format
+from invokeai.backend.patches.lora_conversions.sd_lora_conversion_utils import lora_model_from_sd_state_dict
+from invokeai.backend.patches.lora_conversions.sdxl_lora_conversion_utils import convert_sdxl_keys_to_diffusers_format
 @ModelLoaderRegistry.register(base=BaseModelType.Any, type=ModelType.LoRA, format=ModelFormat.Diffusers)

View File

@@ -39,11 +39,11 @@ from invokeai.backend.model_manager.util.model_util import (
     lora_token_vector_length,
     read_checkpoint_meta,
 )
-from invokeai.backend.patches.conversions.flux_control_lora_utils import is_state_dict_likely_flux_control
-from invokeai.backend.patches.conversions.flux_diffusers_lora_conversion_utils import (
+from invokeai.backend.patches.lora_conversions.flux_control_lora_utils import is_state_dict_likely_flux_control
+from invokeai.backend.patches.lora_conversions.flux_diffusers_lora_conversion_utils import (
     is_state_dict_likely_in_flux_diffusers_format,
 )
-from invokeai.backend.patches.conversions.flux_kohya_lora_conversion_utils import (
+from invokeai.backend.patches.lora_conversions.flux_kohya_lora_conversion_utils import (
     is_state_dict_likely_in_flux_kohya_format,
 )
 from invokeai.backend.quantization.gguf.ggml_tensor import GGMLTensor

View File

@@ -3,10 +3,10 @@ from typing import Any, Dict
 import torch
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.layers.any_lora_layer import AnyLoRALayer
 from invokeai.backend.patches.layers.lora_layer import LoRALayer
 from invokeai.backend.patches.layers.set_parameter_layer import SetParameterLayer
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.lora_model_raw import LoRAModelRaw
 # A regex pattern that matches all of the keys in the Flux Dev/Canny LoRA format.

View File

@@ -2,10 +2,10 @@ from typing import Dict
 import torch
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.layers.any_lora_layer import AnyLoRALayer
 from invokeai.backend.patches.layers.concatenated_lora_layer import ConcatenatedLoRALayer
 from invokeai.backend.patches.layers.lora_layer import LoRALayer
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.lora_model_raw import LoRAModelRaw

View File

@@ -3,9 +3,12 @@ from typing import Any, Dict, TypeVar
 import torch
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_CLIP_PREFIX, FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.layers.any_lora_layer import AnyLoRALayer
 from invokeai.backend.patches.layers.utils import any_lora_layer_from_state_dict
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import (
+    FLUX_LORA_CLIP_PREFIX,
+    FLUX_LORA_TRANSFORMER_PREFIX,
+)
 from invokeai.backend.patches.lora_model_raw import LoRAModelRaw
 # A regex pattern that matches all of the transformer keys in the Kohya FLUX LoRA format.

View File

@@ -1,18 +1,18 @@
 import pytest
 import torch
-from invokeai.backend.patches.conversions.flux_control_lora_utils import (
+from invokeai.backend.patches.lora_conversions.flux_control_lora_utils import (
     is_state_dict_likely_flux_control,
     lora_model_from_flux_control_state_dict,
 )
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
-from tests.backend.patches.conversions.lora_state_dicts.flux_control_lora_format import (
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_control_lora_format import (
     state_dict_keys as flux_control_lora_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_diffusers_format import (
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_diffusers_format import (
     state_dict_keys as flux_diffusers_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.utils import keys_to_mock_state_dict
+from tests.backend.patches.lora_conversions.lora_state_dicts.utils import keys_to_mock_state_dict
 @pytest.mark.parametrize("sd_keys", [flux_control_lora_state_dict_keys])

View File

@@ -1,21 +1,21 @@
 import pytest
 import torch
-from invokeai.backend.patches.conversions.flux_diffusers_lora_conversion_utils import (
+from invokeai.backend.patches.lora_conversions.flux_diffusers_lora_conversion_utils import (
     is_state_dict_likely_in_flux_diffusers_format,
     lora_model_from_flux_diffusers_state_dict,
 )
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_diffusers_format import (
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_diffusers_format import (
     state_dict_keys as flux_diffusers_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_diffusers_no_proj_mlp_format import (
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_diffusers_no_proj_mlp_format import (
     state_dict_keys as flux_diffusers_no_proj_mlp_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_kohya_format import (
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_kohya_format import (
     state_dict_keys as flux_kohya_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.utils import keys_to_mock_state_dict
+from tests.backend.patches.lora_conversions.lora_state_dicts.utils import keys_to_mock_state_dict
 @pytest.mark.parametrize("sd_keys", [flux_diffusers_state_dict_keys, flux_diffusers_no_proj_mlp_state_dict_keys])

View File

@@ -4,22 +4,25 @@ import torch
 from invokeai.backend.flux.model import Flux
 from invokeai.backend.flux.util import params
-from invokeai.backend.patches.conversions.flux_kohya_lora_conversion_utils import (
+from invokeai.backend.patches.lora_conversions.flux_kohya_lora_conversion_utils import (
     _convert_flux_transformer_kohya_state_dict_to_invoke_format,
     is_state_dict_likely_in_flux_kohya_format,
     lora_model_from_flux_kohya_state_dict,
 )
-from invokeai.backend.patches.conversions.flux_lora_constants import FLUX_LORA_CLIP_PREFIX, FLUX_LORA_TRANSFORMER_PREFIX
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_diffusers_format import (
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import (
+    FLUX_LORA_CLIP_PREFIX,
+    FLUX_LORA_TRANSFORMER_PREFIX,
+)
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_diffusers_format import (
     state_dict_keys as flux_diffusers_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_kohya_format import (
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_kohya_format import (
     state_dict_keys as flux_kohya_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.flux_lora_kohya_with_te1_format import (
+from tests.backend.patches.lora_conversions.lora_state_dicts.flux_lora_kohya_with_te1_format import (
     state_dict_keys as flux_kohya_te1_state_dict_keys,
 )
-from tests.backend.patches.conversions.lora_state_dicts.utils import keys_to_mock_state_dict
+from tests.backend.patches.lora_conversions.lora_state_dicts.utils import keys_to_mock_state_dict
 @pytest.mark.parametrize("sd_keys", [flux_kohya_state_dict_keys, flux_kohya_te1_state_dict_keys])