Add repetition_penalty

oobabooga 2023-03-31 14:45:17 -03:00
parent 4c27562157
commit 09b0a3aafb
2 changed files with 2 additions and 2 deletions


@@ -34,7 +34,7 @@ class RWKVModel:
         result.pipeline = pipeline
         return result

-    def generate(self, context="", token_count=20, temperature=1, top_p=1, top_k=50, alpha_frequency=0.1, alpha_presence=0.1, token_ban=[0], token_stop=[], callback=None):
+    def generate(self, context="", token_count=20, temperature=1, top_p=1, top_k=50, repetition_penalty=None, alpha_frequency=0.1, alpha_presence=0.1, token_ban=[0], token_stop=[], callback=None):
         args = PIPELINE_ARGS(
             temperature = temperature,
             top_p = top_p,
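
The new keyword slots in ahead of the alpha_* penalty arguments. A minimal usage sketch against the updated signature, assuming `model` is an already-constructed RWKVModel instance (construction is not part of this diff) and with purely illustrative values:

    # Hypothetical call against the updated generate() signature.
    reply = model.generate(
        context="Hello, my name is",
        token_count=50,
        temperature=0.7,
        top_p=0.9,
        top_k=50,
        repetition_penalty=1.1,  # the argument added by this commit
    )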


@@ -119,7 +119,7 @@ def generate_reply(question, max_new_tokens, do_sample, temperature, top_p, typi
     if any((shared.is_RWKV, shared.is_llamacpp)):
         try:
             if shared.args.no_stream:
-                reply = shared.model.generate(context=question, token_count=max_new_tokens, temperature=temperature, top_p=top_p, top_k=top_k)
+                reply = shared.model.generate(context=question, token_count=max_new_tokens, temperature=temperature, top_p=top_p, top_k=top_k, repetition_penalty=repetition_penalty)
                 if not (shared.args.chat or shared.args.cai_chat):
                     reply = original_question + apply_extensions(reply, "output")
                 yield formatted_outputs(reply, shared.model_name)
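
The hunks above only thread repetition_penalty through to RWKVModel.generate; they do not show how the value is applied to the logits. As background, a common (CTRL-style) formulation penalizes tokens that have already been generated. The following is a self-contained sketch of that general technique, not necessarily what the RWKV pipeline does internally:

    import torch

    def apply_repetition_penalty(logits: torch.Tensor, generated_ids: list[int], penalty: float) -> torch.Tensor:
        # CTRL-style penalty: make previously generated tokens less likely.
        # Positive logits are divided by `penalty`, negative ones multiplied,
        # so for penalty > 1 the score always moves toward "less probable".
        for token_id in set(generated_ids):
            score = logits[token_id].item()
            logits[token_id] = score / penalty if score > 0 else score * penalty
        return logits

A penalty of 1.0 is a no-op; values around 1.1 to 1.3 are typical starting points.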