Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-11-21 23:57:58 +01:00
Fix ExLlamaV2 context length setting (closes #5750)
Commit 624faa1438 (parent 70c58b5fc2)
@@ -77,9 +77,10 @@ def get_model_metadata(model):
     # Transformers metadata
     if hf_metadata is not None:
         metadata = json.loads(open(path, 'r', encoding='utf-8').read())
-        if 'max_position_embeddings' in metadata:
-            model_settings['truncation_length'] = metadata['max_position_embeddings']
-            model_settings['max_seq_len'] = metadata['max_position_embeddings']
+        for k in ['max_position_embeddings', 'max_seq_len']:
+            if k in metadata:
+                model_settings['truncation_length'] = metadata[k]
+                model_settings['max_seq_len'] = metadata[k]
 
         if 'rope_theta' in metadata:
             model_settings['rope_freq_base'] = metadata['rope_theta']
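Why the loop matters: the old code only set the context length when the config exposed 'max_position_embeddings', so a config that instead carries 'max_seq_len' would leave both settings untouched, which appears to be what the commit addresses for ExLlamaV2 models. Below is a minimal, self-contained sketch of the effect; the read_context_length wrapper and the sample config.json contents are hypothetical stand-ins, and only the key-handling mirrors the patched code.

    import json

    def read_context_length(path):
        # Hypothetical stand-in for the relevant part of get_model_metadata():
        # check both keys so configs exposing either one are handled.
        model_settings = {}
        metadata = json.loads(open(path, 'r', encoding='utf-8').read())
        for k in ['max_position_embeddings', 'max_seq_len']:
            if k in metadata:
                model_settings['truncation_length'] = metadata[k]
                model_settings['max_seq_len'] = metadata[k]
        if 'rope_theta' in metadata:
            model_settings['rope_freq_base'] = metadata['rope_theta']
        return model_settings

    if __name__ == '__main__':
        # Assumed example config: only 'max_seq_len' is present, so the old
        # 'max_position_embeddings'-only check would have skipped it.
        with open('config.json', 'w', encoding='utf-8') as f:
            json.dump({'max_seq_len': 32768, 'rope_theta': 1000000.0}, f)
        print(read_context_length('config.json'))
        # -> {'truncation_length': 32768, 'max_seq_len': 32768, 'rope_freq_base': 1000000.0}

With the old single-key check, the same config would have produced no truncation_length or max_seq_len entries at all, leaving the UI at its defaults.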