From 9bfb6165daac74576d5b55ce878be7ad61003033 Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Fri, 7 Mar 2025 21:12:05 +0530
Subject: [PATCH] remove full key prefix from peft.

---
 src/diffusers/loaders/peft.py | 19 ++++---------------
 1 file changed, 4 insertions(+), 15 deletions(-)

diff --git a/src/diffusers/loaders/peft.py b/src/diffusers/loaders/peft.py
index ee7467fdfe35..aaa2fd4108b1 100644
--- a/src/diffusers/loaders/peft.py
+++ b/src/diffusers/loaders/peft.py
@@ -192,11 +192,6 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
         from peft import LoraConfig, inject_adapter_in_model, set_peft_model_state_dict
         from peft.tuners.tuners_utils import BaseTunerLayer
 
-        try:
-            from peft.utils.constants import FULLY_QUALIFIED_PATTERN_KEY_PREFIX
-        except ImportError:
-            FULLY_QUALIFIED_PATTERN_KEY_PREFIX = None
-
         cache_dir = kwargs.pop("cache_dir", None)
         force_download = kwargs.pop("force_download", False)
         proxies = kwargs.pop("proxies", None)
@@ -261,22 +256,16 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
             # Cannot figure out rank from lora layers that don't have atleast 2 dimensions.
             # Bias layers in LoRA only have a single dimension
             if "lora_B" in key and val.ndim > 1:
-                # Support to handle cases where layer patterns are treated as full layer names
-                # was added later in PEFT. So, we handle it accordingly.
-                # TODO: when we fix the minimal PEFT version for Diffusers,
-                # we should remove `_maybe_adjust_config()`.
-                if FULLY_QUALIFIED_PATTERN_KEY_PREFIX:
-                    rank[f"{FULLY_QUALIFIED_PATTERN_KEY_PREFIX}{key}"] = val.shape[1]
-                else:
-                    rank[key] = val.shape[1]
+                # TODO: revisit this after https://github.com/huggingface/peft/pull/2382 is merged.
+                rank[key] = val.shape[1]
 
         if network_alphas is not None and len(network_alphas) >= 1:
             alpha_keys = [k for k in network_alphas.keys() if k.startswith(f"{prefix}.")]
             network_alphas = {k.replace(f"{prefix}.", ""): v for k, v in network_alphas.items() if k in alpha_keys}
 
         lora_config_kwargs = get_peft_kwargs(rank, network_alpha_dict=network_alphas, peft_state_dict=state_dict)
-        if not FULLY_QUALIFIED_PATTERN_KEY_PREFIX:
-            lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
+        # TODO: revisit this after https://github.com/huggingface/peft/pull/2382 is merged.
+        lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
 
         if "use_dora" in lora_config_kwargs:
             if lora_config_kwargs["use_dora"]:
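
Note (not part of the patch): a minimal sketch of the rank-detection loop this change simplifies. After the patch, each `lora_B` key maps directly to its rank (the second dimension of the lora_B weight), with no FULLY_QUALIFIED_PATTERN_KEY_PREFIX prepended. The layer names and tensor shapes below are made up for illustration.

    import torch

    # Illustrative LoRA state dict; keys and shapes are hypothetical.
    state_dict = {
        "to_q.lora_A.weight": torch.zeros(4, 64),  # (rank, in_features)
        "to_q.lora_B.weight": torch.zeros(64, 4),  # (out_features, rank)
        "to_q.lora_B.bias": torch.zeros(64),       # 1-D bias, carries no rank info
    }

    rank = {}
    for key, val in state_dict.items():
        # Rank is only readable from lora_B matrices with at least 2 dimensions;
        # bias tensors are 1-D and are skipped.
        if "lora_B" in key and val.ndim > 1:
            rank[key] = val.shape[1]

    print(rank)  # {'to_q.lora_B.weight': 4}

The resulting rank dict is then passed to get_peft_kwargs() to build the LoraConfig, with _maybe_adjust_config() still applied unconditionally until the linked PEFT PR lands.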