Add "Start reply with" feature to chat mode

This commit is contained in:
oobabooga 2023-06-02 13:58:08 -03:00
parent f7b07c4705
commit 19f78684e6
4 changed files with 17 additions and 6 deletions

View File

@@ -277,7 +277,7 @@ def chatbot_wrapper(text, history, state, regenerate=False, _continue=False, loading_message=True):
         yield output

-def impersonate_wrapper(text, state):
+def impersonate_wrapper(text, start_with, state):
     if shared.model_name == 'None' or shared.model is None:
         logger.error("No model is loaded! Select one in the Model tab.")
         yield ''
@@ -322,8 +322,13 @@ def generate_chat_reply(text, history, state, regenerate=False, _continue=False, loading_message=True):
         yield history

-# Same as above but returns HTML
-def generate_chat_reply_wrapper(text, state, regenerate=False, _continue=False):
+# Same as above but returns HTML for the UI
+def generate_chat_reply_wrapper(text, start_with, state, regenerate=False, _continue=False):
+    if start_with != '' and _continue == False:
+        _continue = True
+        send_dummy_message(text)
+        send_dummy_reply(start_with)
+
     for i, history in enumerate(generate_chat_reply(text, shared.history, state, regenerate, _continue, loading_message=True)):
         if i != 0:
             shared.history = copy.deepcopy(history)

View File

@@ -55,6 +55,7 @@ settings = {
     'truncation_length_min': 0,
     'truncation_length_max': 8192,
     'mode': 'chat',
+    'start_with': '',
     'chat_style': 'cai-chat',
     'instruction_template': 'None',
     'chat-instruct_command': 'Continue the chat dialogue below. Write a single reply for the character "<|character|>".\n\n<|prompt|>',

View File

@@ -626,8 +626,12 @@ def create_interface():
             shared.gradio['Clear history-confirm'] = gr.Button('Confirm', variant='stop', visible=False)
             shared.gradio['Clear history-cancel'] = gr.Button('Cancel', visible=False)

-            shared.gradio['mode'] = gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value=shared.settings['mode'] if shared.settings['mode'] in ['chat', 'instruct', 'chat-instruct'] else 'chat', label='Mode', info='Defines how the chat prompt is generated. In instruct and chat-instruct modes, the instruction template selected under "Chat settings" must match the current model.')
-            shared.gradio['chat_style'] = gr.Dropdown(choices=utils.get_available_chat_styles(), label='Chat style', value=shared.settings['chat_style'], visible=shared.settings['mode'] != 'instruct')
+            with gr.Row():
+                shared.gradio['start_with'] = gr.Textbox(label='Start reply with', placeholder='Sure thing!', value=shared.settings['start_with'])
+
+            with gr.Row():
+                shared.gradio['mode'] = gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value=shared.settings['mode'] if shared.settings['mode'] in ['chat', 'instruct', 'chat-instruct'] else 'chat', label='Mode', info='Defines how the chat prompt is generated. In instruct and chat-instruct modes, the instruction template selected under "Chat settings" must match the current model.')
+                shared.gradio['chat_style'] = gr.Dropdown(choices=utils.get_available_chat_styles(), label='Chat style', value=shared.settings['chat_style'], visible=shared.settings['mode'] != 'instruct')

         with gr.Tab('Chat settings', elem_id='chat-settings'):
             with gr.Row():
@@ -825,7 +829,7 @@ def create_interface():
     # chat mode event handlers
     if shared.is_chat():
-        shared.input_params = [shared.gradio[k] for k in ['Chat input', 'interface_state']]
+        shared.input_params = [shared.gradio[k] for k in ['Chat input', 'start_with', 'interface_state']]
         clear_arr = [shared.gradio[k] for k in ['Clear history-confirm', 'Clear history', 'Clear history-cancel']]
         shared.reload_inputs = [shared.gradio[k] for k in ['name1', 'name2', 'mode', 'chat_style']]

View File

@@ -22,6 +22,7 @@ truncation_length: 2048
 truncation_length_min: 0
 truncation_length_max: 8192
 mode: chat
+start_with: ''
 chat_style: cai-chat
 instruction_template: None
 chat-instruct_command: 'Continue the chat dialogue below. Write a single reply for