Skip to content

Commit 609f17d

Browse files
committed
tts : remove hardcoded constants
ggml-ci
1 parent 985d59f commit 609f17d

File tree

5 files changed

+110
-66
lines changed

5 files changed

+110
-66
lines changed

convert_hf_to_gguf.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
```diff
@@ -2055,7 +2055,11 @@ def set_vocab(self):

     def set_gguf_parameters(self):
         super().set_gguf_parameters()
-        self.gguf_writer.add_vocab_size(self.hparams["vocab_size"])
+        self.gguf_writer.add_vocab_size         (self.hparams["vocab_size"])
+        self.gguf_writer.add_features_length    (self.hparams["n_embd_features"])
+        self.gguf_writer.add_posnet_length      (self.hparams["n_embd_posnet"])
+        self.gguf_writer.add_convnext_length    (self.hparams["n_embd_convnext"])
+        self.gguf_writer.add_feed_forward_length(self.hparams["n_ff"])


 @Model.register("Qwen2MoeForCausalLM")
```

examples/tts/convert_pt_to_hf.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
```diff
@@ -147,6 +147,10 @@ def flatten_state_dict(state_dict, parent_key='', sep='.'):
         "WavTokenizerDec"
     ],
     "hidden_size": 1282,
+    "n_embd_features": 512,
+    "n_embd_posnet": 768,
+    "n_embd_convnext": 768,
+    "n_ff": 2304,
     "vocab_size": 4096,
     "n_head": 1,
     "layer_norm_epsilon": 1e-6,
```

gguf-py/gguf/constants.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
```diff
@@ -90,6 +90,9 @@ class LLM:
         VOCAB_SIZE                 = "{arch}.vocab_size"
         CONTEXT_LENGTH             = "{arch}.context_length"
         EMBEDDING_LENGTH           = "{arch}.embedding_length"
+        FEATURES_LENGTH            = "{arch}.features_length"
+        POSNET_LENGTH              = "{arch}.posnet_length"
+        CONVNEXT_LENGTH            = "{arch}.convnext_length"
         BLOCK_COUNT                = "{arch}.block_count"
         LEADING_DENSE_BLOCK_COUNT  = "{arch}.leading_dense_block_count"
         FEED_FORWARD_LENGTH        = "{arch}.feed_forward_length"
```

gguf-py/gguf/gguf_writer.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
```diff
@@ -631,6 +631,18 @@ def add_context_length(self, length: int) -> None:
     def add_embedding_length(self, length: int) -> None:
         self.add_uint32(Keys.LLM.EMBEDDING_LENGTH.format(arch=self.arch), length)

+    def add_embedding_length(self, length: int) -> None:
+        self.add_uint32(Keys.LLM.EMBEDDING_LENGTH.format(arch=self.arch), length)
+
+    def add_features_length(self, length: int) -> None:
+        self.add_uint32(Keys.LLM.FEATURES_LENGTH.format(arch=self.arch), length)
+
+    def add_posnet_length(self, length: int) -> None:
+        self.add_uint32(Keys.LLM.POSNET_LENGTH.format(arch=self.arch), length)
+
+    def add_convnext_length(self, length: int) -> None:
+        self.add_uint32(Keys.LLM.CONVNEXT_LENGTH.format(arch=self.arch), length)
+
     def add_block_count(self, length: int) -> None:
         self.add_uint32(Keys.LLM.BLOCK_COUNT.format(arch=self.arch), length)
```

NOTE (review): this hunk adds a second `add_embedding_length` definition identical to the one immediately above it. In a Python class body the later `def` silently rebinds the name, so the duplicate is harmless but almost certainly accidental and should be removed in a follow-up.

0 commit comments

Comments (0)