remove final_norm

This commit is contained in:
younesbelkada 2025-07-08 11:26:04 +04:00
parent adff470c8a
commit 097df0ed85
2 changed files with 2 additions and 4 deletions

View File

@@ -4530,7 +4530,7 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
                 // output
                 {
                     output = create_tensor(tn(LLM_TENSOR_OUTPUT, "weight"), {hidden_size, n_vocab}, TENSOR_NOT_REQUIRED);
-                    final_norm = create_tensor(tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {hidden_size}, 0);
+                    output_norm = create_tensor(tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {hidden_size}, 0);
                 }
                 for (int i = 0; i < n_layer; ++i) {
@@ -14740,7 +14740,7 @@ struct llm_build_falcon_h1 : public llm_graph_context {
         cur = inpL;
         cur = build_norm(cur,
-                model.final_norm, NULL,
+                model.output_norm, NULL,
                 LLM_NORM_RMS, -1);
         cb(cur, "result_norm", -1);

View File

@@ -173,7 +173,6 @@ struct llama_layer {
     struct ggml_tensor * attn_norm_cross = nullptr;
     struct ggml_tensor * attn_norm_enc = nullptr;
     struct ggml_tensor * ssm_norm = nullptr;
-    struct ggml_tensor * final_norm = nullptr;
     // attention
     struct ggml_tensor * wq = nullptr;
@@ -365,7 +364,6 @@ struct llama_model {
     struct ggml_tensor * output = nullptr;
     struct ggml_tensor * output_b = nullptr;
     struct ggml_tensor * output_norm_enc = nullptr;
-    struct ggml_tensor * final_norm = nullptr;
     // classifier
     struct ggml_tensor * cls = nullptr;