Skip to content

Commit 051f534

Browse files
author
Linoy
committed
copies fix
1 parent 9fa3d93 commit 051f534

File tree

1 file changed

+1
-5
lines changed

1 file changed

+1
-5
lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4313,11 +4313,7 @@ def load_lora_weights(

         # First, ensure that the checkpoint is a compatible one and can be successfully loaded.
         state_dict = self.lora_state_dict(pretrained_model_name_or_path_or_dict, **kwargs)
-        # convert T2V LoRA to I2V LoRA (when loaded to Wan I2V) by adding zeros for the additional (missing) _img layers
-        state_dict = self._maybe_expand_t2v_lora_for_i2v(
-            transformer=getattr(self, self.transformer_name) if not hasattr(self, "transformer") else self.transformer,
-            state_dict=state_dict,
-        )
+
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
             raise ValueError("Invalid LoRA checkpoint.")

0 commit comments

Comments (0)