From 1878acd9f3b880ba7983d30844790c5ff823fee4 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Sat, 25 Feb 2023 09:30:59 -0300
Subject: [PATCH] Minor bug fix in chat

---
 modules/chat.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/chat.py b/modules/chat.py
index 3c8b4dbf..787fb46f 100644
--- a/modules/chat.py
+++ b/modules/chat.py
@@ -114,7 +114,7 @@ def chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical
         prompt = custom_prompt_generator(text, max_new_tokens, name1, name2, context, chat_prompt_size)
 
     # Generate
-    reply = ' '
+    reply = ''
     for i in range(chat_generation_attempts):
         for reply in generate_reply(prompt+reply, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, eos_token=eos_token, stopping_string=f"\n{name1}:"):
@@ -149,7 +149,7 @@ def impersonate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typ
     prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=True)
 
-    reply = ' '
+    reply = ''
     for i in range(chat_generation_attempts):
         for reply in generate_reply(prompt+reply, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, eos_token=eos_token, stopping_string=f"\n{name2}:"):
             reply, next_character_found, substring_found = extract_message_from_reply(prompt, reply, name1, name2, check, extensions=False)
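
For context, a minimal sketch (not part of the patch) of what this one-character change fixes: both loops concatenate prompt+reply, so initializing reply to ' ' leaks a trailing space into the model input on the first generation attempt. BPE-style tokenizers typically encode a prompt with and without a trailing space differently, which can degrade the first reply. The prompt value below is made up for illustration, not taken from modules/chat.py.

    # Illustrative only -- shows the string actually fed to the model
    # on the first pass through the loop, before and after the fix.
    prompt = "Bob: Hello!\nAlice:"

    old_reply = ' '   # initialization before the fix
    new_reply = ''    # initialization after the fix

    print(repr(prompt + old_reply))  # 'Bob: Hello!\nAlice: ' <- trailing space leaks in
    print(repr(prompt + new_reply))  # 'Bob: Hello!\nAlice:'  <- clean prompt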