mirror of https://github.com/oobabooga/text-generation-webui.git
synced 2024-11-26 09:40:20 +01:00

Add "Impersonate" button

parent ad148571f4
commit 00707a0b3b

server.py | 76 lines changed
@@ -339,7 +339,7 @@ if args.chat or args.cai_chat:
         text = text.strip()
         return text
 
-    def generate_chat_prompt(text, tokens, name1, name2, context, history_size):
+    def generate_chat_prompt(text, tokens, name1, name2, context, history_size, impersonate=False):
         text = clean_chat_message(text)
 
         rows = [f"{context.strip()}\n"]
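Note: the new impersonate flag only changes how the prompt ends, and (as the next hunk shows) drops the row limit from 3 to 2 because the impersonation tail is one row shorter. A minimal, self-contained sketch of the two tails; the names and message are made up, and the real code also routes the bot prefix through apply_extensions and trims rows against the token budget:

# Simplified sketch (illustration only): how the impersonate flag changes
# the prompt tail built by generate_chat_prompt.
def build_prompt_tail(name1, name2, text, impersonate=False):
    if not impersonate:
        return f"{name1}: {text}\n{name2}:"   # ask the bot for its next reply
    return f"{name1}:"                        # ask the model to write AS the user

print(build_prompt_tail("You", "Bot", "Hi"))                    # You: Hi\nBot:
print(build_prompt_tail("You", "Bot", "Hi", impersonate=True))  # You: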
@@ -354,16 +354,44 @@ if args.chat or args.cai_chat:
             i -= 1
             if history_size != 0 and count >= history_size:
                 break
-        rows.append(f"{name1}: {text}\n")
-        rows.append(apply_extensions(f"{name2}:", "bot_prefix"))
-
-        while len(rows) > 3 and len(encode(''.join(rows), tokens)[0]) >= 2048-tokens:
+        if not impersonate:
+            rows.append(f"{name1}: {text}\n")
+            rows.append(apply_extensions(f"{name2}:", "bot_prefix"))
+            limit = 3
+        else:
+            rows.append(f"{name1}:")
+            limit = 2
+
+        while len(rows) > limit and len(encode(''.join(rows), tokens)[0]) >= 2048-tokens:
             rows.pop(1)
             rows.pop(1)
 
         question = ''.join(rows)
         return question
 
+    def extract_message_from_reply(question, reply, current, other, check, extensions=False):
+        next_character_found = False
+        previous_idx = [m.start() for m in re.finditer(f"(^|\n){current}:", question)]
+        idx = [m.start() for m in re.finditer(f"(^|\n){current}:", reply)]
+        idx = idx[len(previous_idx)-1]
+
+        if extensions:
+            reply = reply[idx + 1 + len(apply_extensions(f"{current}:", "bot_prefix")):]
+        else:
+            reply = reply[idx + 1 + len(f"{current}:"):]
+
+        if check:
+            reply = reply.split('\n')[0].strip()
+        else:
+            idx = reply.find(f"\n{other}:")
+            if idx != -1:
+                reply = reply[:idx]
+                next_character_found = True
+            reply = clean_chat_message(reply)
+
+        return reply, next_character_found
+
     def chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check, history_size):
         original_text = text
         text = apply_extensions(text, "input")
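Note: extract_message_from_reply factors out the reply-parsing logic previously inlined in chatbot_wrapper, so both wrappers below can share it. A self-contained rerun of its extensions=False path, with clean_chat_message reduced to str.strip purely for illustration:

import re

def extract_message_from_reply_demo(question, reply, current, other, check):
    next_character_found = False
    previous_idx = [m.start() for m in re.finditer(f"(^|\n){current}:", question)]
    idx = [m.start() for m in re.finditer(f"(^|\n){current}:", reply)]
    idx = idx[len(previous_idx)-1]           # first occurrence not already in the prompt
    reply = reply[idx + 1 + len(f"{current}:"):]
    if check:
        reply = reply.split('\n')[0].strip()
    else:
        idx = reply.find(f"\n{other}:")
        if idx != -1:
            reply = reply[:idx]              # cut everything after the other speaker starts
            next_character_found = True
        reply = reply.strip()                # stands in for clean_chat_message
    return reply, next_character_found

question = "Context\nYou: Hi\nBot:"
reply = question + " Hello there!\nYou: a line the model hallucinated"
print(extract_message_from_reply_demo(question, reply, "Bot", "You", check=False))
# ('Hello there!', True)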
@@ -372,39 +400,21 @@ if args.chat or args.cai_chat:
         history['visible'].append(['', ''])
         eos_token = '\n' if check else None
         for reply in generate_reply(question, tokens, inference_settings, selected_model, eos_token=eos_token, stopping_string=f"\n{name1}:"):
-            next_character_found = False
-
-            previous_idx = [m.start() for m in re.finditer(f"(^|\n){name2}:", question)]
-            idx = [m.start() for m in re.finditer(f"(^|\n){name2}:", reply)]
-            idx = idx[len(previous_idx)-1]
-
-            reply = reply[idx + 1 + len(apply_extensions(f"{name2}:", "bot_prefix")):]
-            if check:
-                reply = reply.split('\n')[0].strip()
-            else:
-                idx = reply.find(f"\n{name1}:")
-                if idx != -1:
-                    reply = reply[:idx]
-                    next_character_found = True
-                reply = clean_chat_message(reply)
-
+            reply, next_character_found = extract_message_from_reply(question, reply, name2, name1, check, extensions=True)
             history['internal'][-1] = [text, reply]
             history['visible'][-1] = [original_text, apply_extensions(reply, "output")]
+            yield history['visible']
             if next_character_found:
                 break
 
-            # Prevent the chat log from flashing if something like "\nYo" is generated just
-            # before "\nYou:" is completed
-            tmp = f"\n{name1}:"
-            next_character_substring_found = False
-            for j in range(1, len(tmp)):
-                if reply[-j:] == tmp[:j]:
-                    next_character_substring_found = True
-
-            if not next_character_substring_found:
-                yield history['visible']
-
-        yield history['visible']
+    def impersonate_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check, history_size):
+        question = generate_chat_prompt(text, tokens, name1, name2, context, history_size, impersonate=True)
+        eos_token = '\n' if check else None
+        for reply in generate_reply(question, tokens, inference_settings, selected_model, eos_token=eos_token, stopping_string=f"\n{name2}:"):
+            reply, next_character_found = extract_message_from_reply(question, reply, name1, name2, check, extensions=False)
+            yield apply_extensions(reply, "output")
+            if next_character_found:
+                break
 
     def cai_chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check, history_size):
         for _history in chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check, history_size):
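Note: impersonate_wrapper mirrors chatbot_wrapper but yields plain strings (a draft of the user's next message) rather than the visible history, which is why the UI wiring below routes its output to the textbox instead of the chat display. A hedged sketch of that streaming contract; fake_generate_reply is a made-up stand-in for the real generator:

def fake_generate_reply(question):
    for partial in (question + " Hi", question + " Hi there!"):
        yield partial

def fake_impersonate_wrapper(question):
    for reply in fake_generate_reply(question):
        # the real code calls extract_message_from_reply(..., extensions=False)
        draft = reply[len(question):].strip()
        yield draft  # plain string -> streamed into the input textbox

for draft in fake_impersonate_wrapper("Context\nBot: Hello\nYou:"):
    print(repr(draft))
# 'Hi'
# 'Hi there!'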
@@ -614,6 +624,7 @@ if args.chat or args.cai_chat:
         with gr.Row():
             buttons["Send last reply to input"] = gr.Button("Send last reply to input")
             buttons["Replace last reply"] = gr.Button("Replace last reply")
+            buttons["Impersonate"] = gr.Button("Impersonate")
 
         with gr.Row():
             with gr.Column():
@@ -674,6 +685,7 @@ if args.chat or args.cai_chat:
 
         buttons["Send last reply to input"].click(send_last_reply_to_input, [], textbox, show_progress=args.no_stream)
         buttons["Replace last reply"].click(replace_last_reply, [textbox, name1, name2], display, show_progress=args.no_stream)
+        buttons["Impersonate"].click(impersonate_wrapper, input_params, textbox, show_progress=args.no_stream)
         buttons["Clear"].click(clear_chat_log, [character_menu, name1, name2], display)
         buttons["Remove last"].click(remove_last_message, [name1, name2], [display, textbox], show_progress=False)
         buttons["Stop"].click(None, None, None, cancels=gen_events)
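Note: because impersonate_wrapper is a generator, Gradio streams each partial draft into the textbox as it is produced. A minimal standalone sketch of this pattern, not the app's real layout, and assuming a Gradio version where generator callbacks require queueing:

import time
import gradio as gr

def impersonate(text):
    # stand-in for token-by-token generation of the user's next message
    for draft in ("H", "He", "Hello!"):
        time.sleep(0.3)
        yield draft

with gr.Blocks() as demo:
    textbox = gr.Textbox(label="Input")
    button = gr.Button("Impersonate")
    button.click(impersonate, textbox, textbox)  # output streams back into the input box

demo.queue().launch()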