Skip to content

Commit 51e1ff4

Browse files
committed
Commit message: cont [no ci]
1 parent: a655269 · commit: 51e1ff4

File tree

1 file changed

+6
-7
lines changed

1 file changed

+6
-7
lines changed

src/llama.cpp

Lines changed: 6 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -77,7 +77,6 @@
7777
#endif
7878

7979
// bump if necessary
80-
#define LLAMA_MAX_EMBD 8
8180
#define LLAMA_MAX_LAYERS 512
8281
#define LLAMA_MAX_EXPERTS 160 // DeepSeekV2
8382

@@ -3074,8 +3073,8 @@ struct llama_model {
30743073
struct ggml_tensor * cls_out = nullptr;
30753074
struct ggml_tensor * cls_out_b = nullptr;
30763075

3077-
struct ggml_tensor * conv_1d = nullptr;
3078-
struct ggml_tensor * conv_1d_b = nullptr;
3076+
struct ggml_tensor * conv1d = nullptr;
3077+
struct ggml_tensor * conv1d_b = nullptr;
30793078

30803079
std::vector<llama_layer> layers;
30813080

@@ -9498,8 +9497,8 @@ static bool llm_load_tensors(
94989497
{
94999498
model.tok_embd = create_tensor(tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {hparams.n_embd_features, n_vocab}, 0);
95009499

9501-
model.conv_1d = create_tensor(tn(LLM_TENSOR_CONV1D, "weight"), {7, hparams.n_embd_features, hparams.posnet.n_embd}, 0);
9502-
model.conv_1d_b = create_tensor(tn(LLM_TENSOR_CONV1D, "bias"), {1, hparams.posnet.n_embd}, 0);
9500+
model.conv1d = create_tensor(tn(LLM_TENSOR_CONV1D, "weight"), {7, hparams.n_embd_features, hparams.posnet.n_embd}, 0);
9501+
model.conv1d_b = create_tensor(tn(LLM_TENSOR_CONV1D, "bias"), {1, hparams.posnet.n_embd}, 0);
95039502

95049503
// posnet
95059504
{
@@ -17183,8 +17182,8 @@ struct llm_build_context {
1718317182

1718417183
cur = ggml_cont(ctx0, ggml_transpose(ctx0, inpL));
1718517184

17186-
cur = ggml_conv_1d_ph(ctx0, model.conv_1d, cur, 1, 1);
17187-
cur = ggml_add(ctx0, cur, model.conv_1d_b);
17185+
cur = ggml_conv_1d_ph(ctx0, model.conv1d, cur, 1, 1);
17186+
cur = ggml_add(ctx0, cur, model.conv1d_b);
1718817187

1718917188
// posnet
1719017189
for (uint32_t il = 0; il < hparams.posnet.n_layer; ++il) {

0 commit comments

Comments
 (0)