From c9a9f63d1b9c4bd8a08dcef519728cd82b5dc4da Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 14 Oct 2024 13:05:51 -0700
Subject: [PATCH] Fix llama.cpp loader not being random (thanks
 @reydeljuego12345)

---
 modules/llamacpp_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/llamacpp_model.py b/modules/llamacpp_model.py
index a16230ca..96f7ed56 100644
--- a/modules/llamacpp_model.py
+++ b/modules/llamacpp_model.py
@@ -136,7 +136,7 @@ class LlamaCppModel:
             prompt=prompt,
             max_tokens=state['max_new_tokens'],
             temperature=state['temperature'],
-            top_p=state['top_p'],
+            top_p=state['top_p'] if state['top_p'] < 1 else 0.999,
             min_p=state['min_p'],
             typical_p=state['typical_p'],
             frequency_penalty=state['frequency_penalty'],
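
Note: per the subject line, the bug is that generation through the llama.cpp
loader was not random when top_p was left at 1. The one-line change above
clamps top_p to stay strictly below 1 before it is passed to the sampler.
A minimal standalone sketch of the same clamping idea (clamp_top_p is a
hypothetical helper name for illustration, not part of the repository):

    def clamp_top_p(top_p: float) -> float:
        # Keep top_p strictly below 1.0, mirroring the patch above:
        # values >= 1 are replaced with 0.999, which is a near no-op
        # for top-p filtering but avoids the non-random behavior the
        # PR title describes when top_p == 1.
        return top_p if top_p < 1 else 0.999

    # Usage (hypothetical): pass the clamped value to the sampler, e.g.
    #   top_p=clamp_top_p(state['top_p'])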