Fix llama.cpp loader not being random (thanks @reydeljuego12345)

oobabooga 2024-10-14 13:05:51 -07:00
parent 49dfa0adaf
commit c9a9f63d1b


@@ -136,7 +136,7 @@ class LlamaCppModel:
     prompt=prompt,
     max_tokens=state['max_new_tokens'],
     temperature=state['temperature'],
-    top_p=state['top_p'],
+    top_p=state['top_p'] if state['top_p'] < 1 else 0.999,
     min_p=state['min_p'],
     typical_p=state['typical_p'],
     frequency_penalty=state['frequency_penalty'],
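
For context, a minimal sketch of what the changed line does, assuming llama-cpp-python's Llama.create_completion API. The clamp_top_p helper and the model path are illustrative only and are not part of the commit; the actual change simply keeps top_p strictly below 1.0, since passing exactly 1.0 appears to be what triggered the non-random output the commit title refers to.

    # Minimal sketch (not part of the commit): keep top_p strictly below 1.0
    # before handing sampling parameters to llama-cpp-python.
    from llama_cpp import Llama

    def clamp_top_p(top_p: float) -> float:
        # Mirrors the one-line change above: 1.0 (or higher) becomes 0.999.
        return top_p if top_p < 1 else 0.999

    llm = Llama(model_path="model.gguf")  # placeholder path

    completion = llm.create_completion(
        prompt="Once upon a time",
        max_tokens=64,
        temperature=0.7,
        top_p=clamp_top_p(1.0),  # 1.0 would otherwise be passed through unchanged
    )
    print(completion["choices"][0]["text"])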