Commit a4a15b5

update

1 parent 0eba7e7 commit a4a15b5
File tree

1 file changed: +39 −13 lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 39 additions & 13 deletions
@@ -1328,7 +1328,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -1762,7 +1764,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -2215,7 +2219,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -2812,7 +2818,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -3141,7 +3149,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -3479,7 +3489,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -3821,7 +3833,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -4161,7 +4175,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -4503,7 +4519,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -4846,7 +4864,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -5239,7 +5259,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -5579,7 +5601,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -5921,7 +5945,9 @@ def load_lora_into_transformer(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
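
For context on what the new docstring describes: when a `metadata` dict is supplied, the `peft` `LoraConfig` arguments are taken from it directly rather than inferred from the state dict. A minimal usage sketch follows; the pipeline class, repository ids, and the exact set of metadata keys are illustrative assumptions, not something this commit specifies.

    # Hedged usage sketch (not from this commit): pass explicit LoRA metadata
    # so the `peft` LoraConfig is not derived from the state dict.
    import torch
    from diffusers import StableDiffusion3Pipeline

    pipe = StableDiffusion3Pipeline.from_pretrained(
        "stabilityai/stable-diffusion-3-medium-diffusers",  # illustrative model id
        torch_dtype=torch.float16,
    )

    # Fetch the raw LoRA state dict via the pipeline's LoRA-loader mixin.
    state_dict = pipe.lora_state_dict("some-user/some-sd3-lora")  # hypothetical repo id

    # Assumed shape of the metadata: plain `LoraConfig` kwargs. In practice this
    # dict would normally be read back from the saved LoRA file's metadata.
    metadata = {
        "r": 16,
        "lora_alpha": 16,
        "target_modules": ["to_q", "to_k", "to_v", "to_out.0"],
    }

    pipe.load_lora_into_transformer(
        state_dict,
        transformer=pipe.transformer,
        adapter_name="example_adapter",
        metadata=metadata,
    )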
