2 files changed, +3 −3 lines changed
First changed file:

@@ -244,7 +244,7 @@ def _fetch_state_dict(
             state_dict = safetensors.torch.load_file(model_file, device="cpu")
             if load_with_metadata:
                 state_dict = _maybe_populate_state_dict_with_metadata(
-                    state_dict, model_file, metadata_key="lora_adapter_config"
+                    state_dict, model_file, metadata_key="lora_adapter_metadata"
                 )

         except (IOError, safetensors.SafetensorError) as e:
@@ -915,7 +915,7 @@ def save_function(weights, filename):
                 for key, value in lora_adapter_metadata.items():
                     if isinstance(value, set):
                         lora_adapter_metadata[key] = list(value)
-                metadata["lora_adapter_config"] = json.dumps(lora_adapter_metadata, indent=2, sort_keys=True)
+                metadata["lora_adapter_metadata"] = json.dumps(lora_adapter_metadata, indent=2, sort_keys=True)

             return safetensors.torch.save_file(weights, filename, metadata=metadata)

Second changed file:

@@ -500,7 +500,7 @@ def save_function(weights, filename):
                 for key, value in lora_adapter_metadata.items():
                     if isinstance(value, set):
                         lora_adapter_metadata[key] = list(value)
-                metadata["lora_adapter_config"] = json.dumps(lora_adapter_metadata, indent=2, sort_keys=True)
+                metadata["lora_adapter_metadata"] = json.dumps(lora_adapter_metadata, indent=2, sort_keys=True)

             return safetensors.torch.save_file(weights, filename, metadata=metadata)

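Both files make the same change: the safetensors header key that carries the serialized LoRA adapter metadata is renamed from "lora_adapter_config" to "lora_adapter_metadata", on the load path (_fetch_state_dict) and on the save paths (save_function). Because safetensors file-level metadata is a flat string-to-string mapping, the structured metadata is JSON-encoded under that single key when saving and parsed back when loading. The sketch below is a minimal, self-contained illustration of that round trip under the renamed key; the file name, tensor shapes, and metadata values are invented for the example and are not taken from the PR.

import json

import safetensors
import safetensors.torch
import torch

KEY = "lora_adapter_metadata"  # the renamed header key from this diff

# Save side: safetensors metadata must be str -> str, so the structured
# adapter metadata is JSON-encoded under a single key.
lora_adapter_metadata = {"r": 8, "lora_alpha": 16, "target_modules": ["to_q", "to_v"]}
metadata = {"format": "pt", KEY: json.dumps(lora_adapter_metadata, indent=2, sort_keys=True)}
weights = {"lora_A.weight": torch.zeros(8, 32), "lora_B.weight": torch.zeros(32, 8)}
safetensors.torch.save_file(weights, "adapter.safetensors", metadata=metadata)

# Load side: read the tensors, then pull the header metadata back out and
# decode the JSON string into a dict again.
state_dict = safetensors.torch.load_file("adapter.safetensors", device="cpu")
with safetensors.safe_open("adapter.safetensors", framework="pt", device="cpu") as f:
    raw = (f.metadata() or {}).get(KEY)
recovered = json.loads(raw) if raw is not None else None
print(recovered)  # {'lora_alpha': 16, 'r': 8, 'target_modules': ['to_q', 'to_v']}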