Fix NTK (alpha) and RoPE scaling for exllamav2 and exllamav2_HF (#3897)

Panchovix 2023-09-13 02:35:09 -03:00 committed by GitHub
parent eb9ebabec7
commit 34dc7306b8
2 changed files with 5 additions and 5 deletions
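
Before this change, both loaders wrote the user's scaling settings to config.rope_scale and config.rope_alpha, attribute names that ExLlamaV2Config does not appear to read, so the NTK alpha (alpha_value) and RoPE compression (compress_pos_emb) settings were silently ignored when loading a model. The fix assigns the same values to scale_pos_emb and scale_alpha_value, the fields the exllamav2 library uses for these settings; a standalone sketch follows the diff below.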

@@ -28,9 +28,9 @@ class Exllamav2Model:
         config.prepare()
         config.max_seq_len = shared.args.max_seq_len
-        config.rope_scale = shared.args.compress_pos_emb
-        config.rope_alpha = shared.args.alpha_value
+        config.scale_pos_emb = shared.args.compress_pos_emb
+        config.scale_alpha_value = shared.args.alpha_value
         model = ExLlamaV2(config)
         split = None

@@ -116,7 +116,7 @@ class Exllamav2HF(PreTrainedModel):
         config.prepare()
         config.max_seq_len = shared.args.max_seq_len
-        config.rope_scale = shared.args.compress_pos_emb
-        config.rope_alpha = shared.args.alpha_value
+        config.scale_pos_emb = shared.args.compress_pos_emb
+        config.scale_alpha_value = shared.args.alpha_value
         return Exllamav2HF(config)
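
For reference, a minimal standalone sketch of the corrected configuration path, using the same ExLlamaV2Config fields as the hunks above. The import line, the model_dir assignment, and the model.load(split) call are assumptions drawn from exllamav2's public API around the time of this commit; the scale_pos_emb and scale_alpha_value assignments mirror the diff, and the flag names in the comments are the shared.args settings referenced above.

# Minimal sketch of the corrected scaling setup (assumptions: the exllamav2
# import path, model_dir, and model.load(split) are taken from the library's
# public API; the two scale_* assignments mirror the diff above).
from exllamav2 import ExLlamaV2, ExLlamaV2Config

config = ExLlamaV2Config()
config.model_dir = "/path/to/model"      # hypothetical model directory
config.prepare()

config.max_seq_len = 8192                # extended context window
config.scale_pos_emb = 2.0               # linear RoPE scaling (webui: --compress_pos_emb)
config.scale_alpha_value = 2.5           # NTK-aware alpha scaling (webui: --alpha_value)

model = ExLlamaV2(config)
split = None                             # optional per-GPU VRAM split, as in the loader above
model.load(split)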