@@ -1752,7 +1752,7 @@ class Mistral3Model(LlamaModel):
 
     # we need to merge the text_config into the root level of hparams
     def __init__(self, *args, **kwargs):
-        hparams = Model.load_hparams(kwargs["dir_model"])
+        hparams = kwargs["hparams"] if "hparams" in kwargs else Model.load_hparams(args[0])
         if "text_config" in hparams:
             hparams = {**hparams, **hparams["text_config"]}
         kwargs["hparams"] = hparams
@@ -3385,7 +3385,7 @@ class Gemma3Model(Model):
 
     # we need to merge the text_config into the root level of hparams
     def __init__(self, *args, **kwargs):
-        hparams = Model.load_hparams(kwargs["dir_model"])
+        hparams = kwargs["hparams"] if "hparams" in kwargs else Model.load_hparams(args[0])
         if "text_config" in hparams:
             hparams = {**hparams, **hparams["text_config"]}
         kwargs["hparams"] = hparams
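
Both hunks above make the same change: the constructor now accepts a pre-loaded hparams dict through kwargs and only falls back to reading the config from the model directory (the first positional argument) when none is supplied, then hoists the nested text_config keys to the root. Below is a minimal, self-contained sketch of that pattern; load_hparams here is a stand-in for the converter's Model.load_hparams, not its real implementation.

import json
from pathlib import Path


def load_hparams(dir_model: Path) -> dict:
    # Stand-in: read the Hugging Face config.json from the model directory.
    with open(dir_model / "config.json", encoding="utf-8") as f:
        return json.load(f)


def resolve_hparams(args: tuple, kwargs: dict) -> dict:
    # Reuse hparams when the caller already loaded them; otherwise load them
    # from the model directory passed as the first positional argument.
    hparams = kwargs["hparams"] if "hparams" in kwargs else load_hparams(args[0])
    # Multimodal configs nest the language-model settings under "text_config";
    # hoist those keys to the root so downstream code sees a flat dict.
    if "text_config" in hparams:
        hparams = {**hparams, **hparams["text_config"]}
    return hparams

A caller that has already parsed config.json can pass it as hparams=... and avoid a second read of the file, which is what the new kwargs check enables.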
@@ -5358,7 +5358,7 @@ def main() -> None:
             logger.error(f"Model {model_architecture} is not supported")
             sys.exit(1)
 
-        model_instance = model_class(dir_model=dir_model, ftype=output_type, fname_out=fname_out,
+        model_instance = model_class(dir_model, output_type, fname_out,
                                      is_big_endian=args.bigendian, use_temp_file=args.use_temp_file,
                                      eager=args.no_lazy,
                                      metadata_override=args.metadata, model_name=args.model_name,