From 717d6e43d4cee56633546a675fcbd366ebdaa4ad Mon Sep 17 00:00:00 2001
From: Valeriy Selitskiy <239034+iamwavecut@users.noreply.github.com>
Date: Sat, 3 May 2025 21:58:12 +0200
Subject: [PATCH 1/2] [lora_conversion] Enhance key handling for OneTrainer
 components in LORA conversion utility (#11441)

---
 .../loaders/lora_conversion_utils.py | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/loaders/lora_conversion_utils.py b/src/diffusers/loaders/lora_conversion_utils.py
index a9e154af3cec..75a88916415d 100644
--- a/src/diffusers/loaders/lora_conversion_utils.py
+++ b/src/diffusers/loaders/lora_conversion_utils.py
@@ -727,8 +727,25 @@ def _convert(original_key, diffusers_key, state_dict, new_state_dict):
         elif k.startswith("lora_te1_"):
             has_te_keys = True
             continue
+        elif k.startswith("lora_transformer_context_embedder"):
+            diffusers_key = "context_embedder"
+        elif k.startswith("lora_transformer_norm_out_linear"):
+            diffusers_key = "norm_out.linear"
+        elif k.startswith("lora_transformer_proj_out"):
+            diffusers_key = "proj_out"
+        elif k.startswith("lora_transformer_x_embedder"):
+            diffusers_key = "x_embedder"
+        elif k.startswith("lora_transformer_time_text_embed_guidance_embedder_linear_"):
+            i = int(k.split("lora_transformer_time_text_embed_guidance_embedder_linear_")[-1])
+            diffusers_key = f"time_text_embed.guidance_embedder.linear_{i}"
+        elif k.startswith("lora_transformer_time_text_embed_text_embedder_linear_"):
+            i = int(k.split("lora_transformer_time_text_embed_text_embedder_linear_")[-1])
+            diffusers_key = f"time_text_embed.text_embedder.linear_{i}"
+        elif k.startswith("lora_transformer_time_text_embed_timestep_embedder_linear_"):
+            i = int(k.split("lora_transformer_time_text_embed_timestep_embedder_linear_")[-1])
+            diffusers_key = f"time_text_embed.timestep_embedder.linear_{i}"
         else:
-            raise NotImplementedError
+            raise NotImplementedError(k)
 
         if "attn_" in k:
             if "_to_out_0" in k:

From 1208bb4c153fffa7d89b375085962ea720fb3ad8 Mon Sep 17 00:00:00 2001
From: Valeriy Selitskiy <239034+iamwavecut@users.noreply.github.com>
Date: Tue, 6 May 2025 14:18:12 +0200
Subject: [PATCH 2/2] Update src/diffusers/loaders/lora_conversion_utils.py

Co-authored-by: Sayak Paul
---
 src/diffusers/loaders/lora_conversion_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/loaders/lora_conversion_utils.py b/src/diffusers/loaders/lora_conversion_utils.py
index 75a88916415d..d5fa7dcfc3a8 100644
--- a/src/diffusers/loaders/lora_conversion_utils.py
+++ b/src/diffusers/loaders/lora_conversion_utils.py
@@ -745,7 +745,7 @@ def _convert(original_key, diffusers_key, state_dict, new_state_dict):
             i = int(k.split("lora_transformer_time_text_embed_timestep_embedder_linear_")[-1])
             diffusers_key = f"time_text_embed.timestep_embedder.linear_{i}"
         else:
-            raise NotImplementedError(k)
+            raise NotImplementedError(f"Handling for key ({k}) is not implemented.")
 
         if "attn_" in k:
             if "_to_out_0" in k:
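
---

Note: for reference, a minimal standalone sketch of the key mapping these two
patches introduce. The helper name map_onetrainer_key and the sample keys below
are illustrative only, not part of the diffusers API; the real logic runs in the
elif chain shown in the hunks above. The sketch assumes k is a base key with its
.lora_up.weight / .lora_down.weight / .alpha suffix already stripped, as the
int(...) parse on the trailing characters implies.

def map_onetrainer_key(k: str) -> str:
    """Translate a OneTrainer-style LoRA key prefix into a diffusers module path."""
    # Fixed-name modules: prefix maps directly to a module path.
    static_prefixes = {
        "lora_transformer_context_embedder": "context_embedder",
        "lora_transformer_norm_out_linear": "norm_out.linear",
        "lora_transformer_proj_out": "proj_out",
        "lora_transformer_x_embedder": "x_embedder",
    }
    for prefix, diffusers_key in static_prefixes.items():
        if k.startswith(prefix):
            return diffusers_key
    # Indexed embedder layers: the trailing integer selects the linear sublayer.
    indexed_prefixes = {
        "lora_transformer_time_text_embed_guidance_embedder_linear_": "time_text_embed.guidance_embedder.linear_{}",
        "lora_transformer_time_text_embed_text_embedder_linear_": "time_text_embed.text_embedder.linear_{}",
        "lora_transformer_time_text_embed_timestep_embedder_linear_": "time_text_embed.timestep_embedder.linear_{}",
    }
    for prefix, template in indexed_prefixes.items():
        if k.startswith(prefix):
            return template.format(int(k.split(prefix)[-1]))
    # Unmatched keys fail loudly, with the message wording from PATCH 2/2.
    raise NotImplementedError(f"Handling for key ({k}) is not implemented.")

assert map_onetrainer_key("lora_transformer_x_embedder") == "x_embedder"
assert (
    map_onetrainer_key("lora_transformer_time_text_embed_guidance_embedder_linear_1")
    == "time_text_embed.guidance_embedder.linear_1"
)

The net effect of the series: seven OneTrainer key prefixes that previously fell
through to a bare NotImplementedError now convert cleanly, and any key that still
matches nothing raises with an explicit message naming the offending key.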