From cb169d0834982f1afe37e812d384f09e4b19ccc8 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Sat, 8 Apr 2023 17:34:07 -0300
Subject: [PATCH] Minor formatting changes

---
 modules/text_generation.py | 4 ----
 server.py                  | 4 ++--
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/modules/text_generation.py b/modules/text_generation.py
index 80bb34d2..acae1007 100644
--- a/modules/text_generation.py
+++ b/modules/text_generation.py
@@ -63,8 +63,6 @@ def generate_softprompt_input_tensors(input_ids):
     return inputs_embeds, filler_input_ids
 
 # Removes empty replies from gpt4chan outputs
-
-
 def fix_gpt4chan(s):
     for i in range(10):
         s = re.sub("--- [0-9]*\n>>[0-9]*\n---", "---", s)
@@ -73,8 +71,6 @@ def fix_gpt4chan(s):
     return s
 
 # Fix the LaTeX equations in galactica
-
-
 def fix_galactica(s):
     s = s.replace(r'\[', r'$')
     s = s.replace(r'\]', r'$')
diff --git a/server.py b/server.py
index 5d004101..740020ea 100644
--- a/server.py
+++ b/server.py
@@ -398,13 +398,13 @@ def create_interface():
             reload_inputs = [shared.gradio[k] for k in ['name1', 'name2', 'Chat mode']]
 
             gen_events.append(shared.gradio['Generate'].click(
-                lambda x : (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
+                lambda x: (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
                 chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
                 lambda: chat.save_history(timestamp=False), [], [], show_progress=False)
             )
 
             gen_events.append(shared.gradio['textbox'].submit(
-                lambda x : (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
+                lambda x: (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
                 chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
                 lambda: chat.save_history(timestamp=False), [], [], show_progress=False)
             )