From 0790c121aad79e35e89413d5031818413944d6d4 Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Sun, 30 Jul 2023 14:46:36 +0200
Subject: [PATCH] constants.py : add layer norm eps

---
 constants.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/constants.py b/constants.py
index 024bf7b03..14f11123b 100644
--- a/constants.py
+++ b/constants.py
@@ -27,6 +27,8 @@ KEY_ATTENTION_HEAD_COUNT = "{llm}.attention.head_count"
 KEY_ATTENTION_HEAD_COUNT_KV = "{llm}.attention.head_count_kv"
 KEY_ATTENTION_MAX_ALIBI_BIAS = "{llm}.attention.max_alibi_bias"
 KEY_ATTENTION_CLAMP_KQV = "{llm}.attention.clamp_kqv"
+KEY_ATTENTION_LAYERNORM_EPS = "{llm}.attention.layer_norm_epsilon"
+KEY_ATTENTION_LAYERNORM_RMS_EPS = "{llm}.attention.layer_norm_rms_epsilon"
 
 # RoPE
 KEY_ROPE_DIMENSION_COUNT = "{llm}.rope.dimension_count"
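
Not part of the patch, only for context: a minimal sketch of how these key templates could be expanded, assuming the "{llm}" placeholder is filled in with the model architecture name via Python str.format (the architecture name "llama" below is a hypothetical example, not taken from the patch):

# Sketch only (assumption, not from the patch): expanding the key templates
# with an assumed architecture name to obtain the final metadata key strings.
KEY_ATTENTION_LAYERNORM_EPS     = "{llm}.attention.layer_norm_epsilon"
KEY_ATTENTION_LAYERNORM_RMS_EPS = "{llm}.attention.layer_norm_rms_epsilon"

arch = "llama"  # hypothetical architecture name
print(KEY_ATTENTION_LAYERNORM_EPS.format(llm=arch))      # llama.attention.layer_norm_epsilon
print(KEY_ATTENTION_LAYERNORM_RMS_EPS.format(llm=arch))  # llama.attention.layer_norm_rms_epsilon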