Multiple histories for each character (#4022)

Author: oobabooga, 2023-09-21 17:19:32 -03:00, committed by GitHub
Commit: 00ab450c13 (parent: 029da9563f)
9 changed files with 240 additions and 197 deletions

========================= changed file =========================

@@ -0,0 +1,4 @@
+name: AI
+greeting: I have the entire internet compressed into my neural network weights. What would you like to know?
+context: |
+  The following is a conversation with an AI Large Language Model. The AI has been trained to answer questions, provide recommendations, and help with decision making. The AI follows user requests. The AI thinks outside the box.

========================= changed file =========================

@@ -244,11 +244,9 @@ if (buttonsInChat.length > 0) {
         const thisButton = buttonsInChat[i];
         menu.appendChild(thisButton);
 
-        if(i != 8) {
-            thisButton.addEventListener("click", () => {
-                hideMenu();
-            });
-        }
+        thisButton.addEventListener("click", () => {
+            hideMenu();
+        });
 
         const buttonText = thisButton.textContent;
         const matches = buttonText.match(/(\(.*?\))/);

========================= changed file =========================

@@ -4,6 +4,7 @@ import functools
 import html
 import json
 import re
+from datetime import datetime
 from pathlib import Path
 
 import gradio as gr
@@ -297,8 +298,25 @@ def generate_chat_reply(text, state, regenerate=False, _continue=False, loading_
         yield history
 
 
-# Same as above but returns HTML for the UI
+def character_is_loaded(state, raise_exception=False):
+    if state['mode'] in ['chat', 'chat-instruct'] and state['name2'] == '':
+        logger.error('It looks like no character is loaded. Please load one under Parameters > Character.')
+        if raise_exception:
+            raise ValueError
+
+        return False
+    else:
+        return True
+
+
 def generate_chat_reply_wrapper(text, state, regenerate=False, _continue=False):
+    '''
+    Same as above but returns HTML for the UI
+    '''
+
+    if not character_is_loaded(state):
+        return
+
     if state['start_with'] != '' and not _continue:
         if regenerate:
             text, state['history'] = remove_last_message(state['history'])
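
The new character_is_loaded guard is what both generate_chat_reply_wrapper and the UI event chains rely on to bail out when no character is selected. A minimal sketch of its behavior, using a hand-built state dict in place of the real gathered Gradio interface state:

# Sketch only: the real `state` is the gathered Gradio interface state.
from modules.chat import character_is_loaded

# No character loaded in chat mode: logs an error and returns False.
assert character_is_loaded({'mode': 'chat', 'name2': ''}) is False

# A loaded character (name2 is set) passes the check.
assert character_is_loaded({'mode': 'chat', 'name2': 'AI'}) is True

# With raise_exception=True (how the mode-change chain uses it below),
# a missing character aborts the event chain instead of failing silently.
try:
    character_is_loaded({'mode': 'chat-instruct', 'name2': ''}, raise_exception=True)
except ValueError:
    pass
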
@@ -359,86 +377,132 @@ def send_dummy_reply(text, state):
     return history
 
 
-def clear_chat_log(state):
-    greeting = replace_character_names(state['greeting'], state['name1'], state['name2'])
-    mode = state['mode']
-    history = state['history']
-
-    history['visible'] = []
-    history['internal'] = []
-    if mode != 'instruct':
-        if greeting != '':
-            history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
-            history['visible'] += [['', apply_extensions('output', greeting, state, is_chat=True)]]
-
-    return history
-
-
 def redraw_html(history, name1, name2, mode, style, reset_cache=False):
     return chat_html_wrapper(history, name1, name2, mode, style, reset_cache=reset_cache)
 
 
-def save_history(history, path=None):
-    p = path or Path('logs/exported_history.json')
+def start_new_chat(state):
+    mode = state['mode']
+    history = {'internal': [], 'visible': []}
+
+    if mode != 'instruct':
+        greeting = replace_character_names(state['greeting'], state['name1'], state['name2'])
+        if greeting != '':
+            history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
+            history['visible'] += [['', apply_extensions('output', greeting, state, is_chat=True)]]
+
+    unique_id = datetime.now().strftime('%Y%m%d-%H-%M-%S')
+    save_history(history, unique_id, state['character_menu'], state['mode'])
+
+    return history
+
+
+def get_history_file_path(unique_id, character, mode):
+    if mode == 'instruct':
+        p = Path(f'logs/instruct/{unique_id}.json')
+    else:
+        p = Path(f'logs/chat/{character}/{unique_id}.json')
+
+    return p
+
+
+def save_history(history, unique_id, character, mode):
+    if shared.args.multi_user:
+        return
+
+    p = get_history_file_path(unique_id, character, mode)
     if not p.parent.is_dir():
         p.parent.mkdir(parents=True)
 
     with open(p, 'w', encoding='utf-8') as f:
         f.write(json.dumps(history, indent=4))
 
-    return p
 
+def find_all_histories(state):
+    if shared.args.multi_user:
+        return ['']
+
+    if state['mode'] == 'instruct':
+        paths = Path('logs/instruct').glob('*.json')
+    else:
+        character = state['character_menu']
+
+        # Handle obsolete filenames and paths
+        old_p = Path(f'logs/{character}_persistent.json')
+        new_p = Path(f'logs/persistent_{character}.json')
+        if old_p.exists():
+            logger.warning(f"Renaming {old_p} to {new_p}")
+            old_p.rename(new_p)
+
+        if new_p.exists():
+            unique_id = datetime.now().strftime('%Y%m%d-%H-%M-%S')
+            p = get_history_file_path(unique_id, character, state['mode'])
+            logger.warning(f"Moving {new_p} to {p}")
+            p.parent.mkdir(exist_ok=True)
+            new_p.rename(p)
+
+        paths = Path(f'logs/chat/{character}').glob('*.json')
+
+    histories = sorted(paths, key=lambda x: x.stat().st_mtime, reverse=True)
+    histories = [path.stem for path in histories]
+
+    return histories
 
-def load_history(file, history):
+
+def load_latest_history(state):
+    '''
+    Loads the latest history for the given character in chat or chat-instruct
+    mode, or the latest instruct history for instruct mode.
+    '''
+
+    if shared.args.multi_user:
+        return start_new_chat(state)
+
+    histories = find_all_histories(state)
+
+    if len(histories) > 0:
+        unique_id = Path(histories[0]).stem
+        history = load_history(unique_id, state['character_menu'], state['mode'])
+    else:
+        history = start_new_chat(state)
+
+    return history
+
+
+def load_history(unique_id, character, mode):
+    p = get_history_file_path(unique_id, character, mode)
+
+    f = json.loads(open(p, 'rb').read())
+    if 'internal' in f and 'visible' in f:
+        history = f
+    else:
+        history = {
+            'internal': f['data'],
+            'visible': f['data_visible']
+        }
+
+    return history
+
+
+def load_history_json(file, history):
     try:
         file = file.decode('utf-8')
-        j = json.loads(file)
-        if 'internal' in j and 'visible' in j:
-            return j
+        f = json.loads(file)
+        if 'internal' in f and 'visible' in f:
+            history = f
         else:
-            return history
+            history = {
+                'internal': f['data'],
+                'visible': f['data_visible']
+            }
+
+        return history
     except:
         return history
 
 
-def save_persistent_history(history, character, mode):
-    if mode in ['chat', 'chat-instruct'] and character not in ['', 'None', None] and not shared.args.multi_user:
-        save_history(history, path=Path(f'logs/persistent_{character}.json'))
-
-
-def load_persistent_history(state):
-    if shared.session_is_loading:
-        shared.session_is_loading = False
-        return state['history']
-
-    if state['mode'] == 'instruct':
-        return state['history']
-
-    character = state['character_menu']
-    greeting = replace_character_names(state['greeting'], state['name1'], state['name2'])
-
-    should_load_history = (not shared.args.multi_user and character not in ['None', '', None])
-    old_p = Path(f'logs/{character}_persistent.json')
-    p = Path(f'logs/persistent_{character}.json')
-    if should_load_history and old_p.exists():
-        logger.warning(f"Renaming {old_p} to {p}")
-        old_p.rename(p)
-
-    if should_load_history and p.exists():
-        f = json.loads(open(p, 'rb').read())
-        if 'internal' in f and 'visible' in f:
-            history = f
-        else:
-            history = {'internal': [], 'visible': []}
-            history['internal'] = f['data']
-            history['visible'] = f['data_visible']
-    else:
-        history = {'internal': [], 'visible': []}
-        if greeting != "":
-            history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
-            history['visible'] += [['', apply_extensions('output', greeting, state, is_chat=True)]]
-
-    return history
+def delete_history(unique_id, character, mode):
+    p = get_history_file_path(unique_id, character, mode)
+    delete_file(p)
 
 
 def replace_character_names(text, name1, name2):
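
Taken together, the functions above replace the single logs/persistent_<character>.json file with one JSON file per conversation, identified by a timestamp-style unique_id. A small sketch of the resulting layout and round trip, assuming the module is imported as modules.chat and that the working directory is the repository root:

from datetime import datetime

from modules import chat

# One file per conversation:
#   chat / chat-instruct mode -> logs/chat/<character>/<unique_id>.json
#   instruct mode             -> logs/instruct/<unique_id>.json
unique_id = datetime.now().strftime('%Y%m%d-%H-%M-%S')
print(chat.get_history_file_path(unique_id, 'Assistant', 'chat'))

# The saved JSON keeps the usual two-list structure; legacy files that used
# 'data'/'data_visible' are converted on load by load_history/load_history_json.
history = {'internal': [], 'visible': []}
chat.save_history(history, unique_id, 'Assistant', 'chat')

# find_all_histories() lists the ids newest-first (by mtime); that list is what
# populates the "Past chats" dropdown, with histories[0] as the current chat.
print(chat.find_all_histories({'mode': 'chat', 'character_menu': 'Assistant'}))
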
@@ -465,61 +529,55 @@ def load_character(character, name1, name2, instruct=False):
     greeting_field = 'greeting'
     picture = None
 
-    # Delete the profile picture cache, if any
-    if Path("cache/pfp_character.png").exists() and not instruct:
-        Path("cache/pfp_character.png").unlink()
-
     if instruct:
         name1 = name2 = ''
         folder = 'instruction-templates'
     else:
         folder = 'characters'
 
-    if character not in ['None', '', None]:
-        picture = generate_pfp_cache(character)
-        filepath = None
-        for extension in ["yml", "yaml", "json"]:
-            filepath = Path(f'{folder}/{character}.{extension}')
-            if filepath.exists():
-                break
+    filepath = None
+    for extension in ["yml", "yaml", "json"]:
+        filepath = Path(f'{folder}/{character}.{extension}')
+        if filepath.exists():
+            break
 
-        if filepath is None:
-            logger.error(f"Could not find character file for {character} in {folder} folder. Please check your spelling.")
-            return name1, name2, picture, greeting, context, turn_template.replace("\n", r"\n")
+    if filepath is None or not filepath.exists():
+        logger.error(f"Could not find the character \"{character}\" inside {folder}/. No character has been loaded.")
+        raise ValueError
 
-        file_contents = open(filepath, 'r', encoding='utf-8').read()
-        data = json.loads(file_contents) if extension == "json" else yaml.safe_load(file_contents)
+    file_contents = open(filepath, 'r', encoding='utf-8').read()
+    data = json.loads(file_contents) if extension == "json" else yaml.safe_load(file_contents)
 
-        # Finding the bot's name
-        for k in ['name', 'bot', '<|bot|>', 'char_name']:
-            if k in data and data[k] != '':
-                name2 = data[k]
-                break
+    if Path("cache/pfp_character.png").exists() and not instruct:
+        Path("cache/pfp_character.png").unlink()
 
-        # Find the user name (if any)
-        for k in ['your_name', 'user', '<|user|>']:
-            if k in data and data[k] != '':
-                name1 = data[k]
-                break
+    picture = generate_pfp_cache(character)
 
-        if 'context' in data:
-            context = data['context']
-            if not instruct:
-                context = context.strip() + '\n'
-        elif "char_persona" in data:
-            context = build_pygmalion_style_context(data)
-            greeting_field = 'char_greeting'
+    # Finding the bot's name
+    for k in ['name', 'bot', '<|bot|>', 'char_name']:
+        if k in data and data[k] != '':
+            name2 = data[k]
+            break
 
-        if greeting_field in data:
-            greeting = data[greeting_field]
+    # Find the user name (if any)
+    for k in ['your_name', 'user', '<|user|>']:
+        if k in data and data[k] != '':
+            name1 = data[k]
+            break
 
-        if 'turn_template' in data:
-            turn_template = data['turn_template']
+    if 'context' in data:
+        context = data['context']
+        if not instruct:
+            context = context.strip() + '\n'
+    elif "char_persona" in data:
+        context = build_pygmalion_style_context(data)
+        greeting_field = 'char_greeting'
 
-    else:
-        context = shared.settings['context']
-        name2 = shared.settings['name2']
-        greeting = shared.settings['greeting']
+    if greeting_field in data:
+        greeting = data[greeting_field]
+
+    if 'turn_template' in data:
+        turn_template = data['turn_template']
 
     return name1, name2, picture, greeting, context, turn_template.replace("\n", r"\n")
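
Note the behavior change above: load_character no longer falls back to the shared.settings defaults when a character cannot be found; it logs an error and raises ValueError, and the UI chains the follow-up steps with .success() so a failed load stops the event chain. A sketch of the lookup order it implements (the helper name here is illustrative, not part of the codebase):

from pathlib import Path

def resolve_character_file(character, folder='characters'):
    # Mirrors the loop in load_character: the first existing
    # .yml / .yaml / .json file under the folder wins.
    for extension in ['yml', 'yaml', 'json']:
        filepath = Path(f'{folder}/{character}.{extension}')
        if filepath.exists():
            return filepath

    # load_character logs an error and raises ValueError at this point.
    raise ValueError(f'Could not find the character "{character}" inside {folder}/.')
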

========================= changed file =========================

@@ -24,7 +24,6 @@ processing_message = '*Is typing...*'
 gradio = {}
 persistent_interface_state = {}
 need_restart = False
-session_is_loading = False
 
 # UI defaults
 settings = {
@@ -33,7 +32,6 @@ settings = {
     'start_with': '',
     'mode': 'chat',
     'chat_style': 'cai-chat',
-    'character': 'None',
     'prompt-default': 'QA',
     'prompt-notebook': 'QA',
     'preset': 'simple-1',
@@ -54,9 +52,7 @@ settings = {
     'skip_special_tokens': True,
     'stream': True,
     'name1': 'You',
-    'name2': 'Assistant',
-    'context': 'This is a conversation with your Assistant. It is a computer program designed to help you with various tasks such as answering questions, providing recommendations, and helping with decision making. You can ask it anything you want and it will do its best to give you accurate and relevant information.',
-    'greeting': '',
+    'character': 'Assistant',
     'instruction_template': 'Alpaca',
     'chat-instruct_command': 'Continue the chat dialogue below. Write a single reply for the character "<|character|>".\n\n<|prompt|>',
     'autoload_model': False,
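
With name2, context, and greeting gone from the defaults, the only character-related setting left is the name of the character to load; its display name, greeting, and context now come from the character's own YAML file (presumably the new file added at the top of this commit). A sketch of the relevant defaults after the change, other keys omitted:

# Sketch: the character-related subset of shared.settings after this commit.
settings = {
    'name1': 'You',
    'character': 'Assistant',         # replaces the old 'name2'/'context'/'greeting' defaults
    'instruction_template': 'Alpaca',
}
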

========================= changed file =========================

@@ -12,7 +12,7 @@ from modules.utils import gradio
 inputs = ('Chat input', 'interface_state')
 reload_arr = ('history', 'name1', 'name2', 'mode', 'chat_style')
-clear_arr = ('Clear history-confirm', 'Clear history', 'Clear history-cancel')
+clear_arr = ('delete_chat-confirm', 'delete_chat', 'delete_chat-cancel')
 
 
 def create_ui():
@@ -23,7 +23,7 @@ def create_ui():
     with gr.Tab('Chat', elem_id='chat-tab', elem_classes=("old-ui" if shared.args.chat_buttons else None)):
         with gr.Row():
             with gr.Column(elem_id='chat-col'):
-                shared.gradio['display'] = gr.HTML(value=chat_html_wrapper({'internal': [], 'visible': []}, shared.settings['name1'], shared.settings['name2'], 'chat', 'cai-chat'))
+                shared.gradio['display'] = gr.HTML(value=chat_html_wrapper({'internal': [], 'visible': []}, '', '', 'chat', 'cai-chat'))
 
                 with gr.Row(elem_id="chat-input-row"):
                     with gr.Column(scale=1, elem_id='gr-hover-container'):
@@ -45,26 +45,34 @@ def create_ui():
             shared.gradio['Regenerate'] = gr.Button('Regenerate (Ctrl + Enter)', elem_id='Regenerate')
             shared.gradio['Continue'] = gr.Button('Continue (Alt + Enter)', elem_id='Continue')
             shared.gradio['Remove last'] = gr.Button('Remove last reply (Ctrl + Shift + Backspace)', elem_id='Remove-last')
 
         with gr.Row():
             shared.gradio['Replace last reply'] = gr.Button('Replace last reply (Ctrl + Shift + L)', elem_id='Replace-last')
             shared.gradio['Copy last reply'] = gr.Button('Copy last reply (Ctrl + Shift + K)', elem_id='Copy-last')
             shared.gradio['Impersonate'] = gr.Button('Impersonate (Ctrl + Shift + M)', elem_id='Impersonate')
 
         with gr.Row():
             shared.gradio['Send dummy message'] = gr.Button('Send dummy message')
             shared.gradio['Send dummy reply'] = gr.Button('Send dummy reply')
 
         with gr.Row():
-            shared.gradio['Clear history'] = gr.Button('Clear history')
-            shared.gradio['Clear history-cancel'] = gr.Button('Cancel', visible=False)
-            shared.gradio['Clear history-confirm'] = gr.Button('Confirm', variant='stop', visible=False, elem_id='clear-history-confirm')
+            shared.gradio['Start new chat'] = gr.Button('Start new chat')
 
         with gr.Row():
             shared.gradio['send-chat-to-default'] = gr.Button('Send to default')
             shared.gradio['send-chat-to-notebook'] = gr.Button('Send to notebook')
 
+        with gr.Row():
+            shared.gradio['unique_id'] = gr.Dropdown(label='Past chats', elem_classes=['slim-dropdown'])
+            shared.gradio['delete_chat'] = gr.Button('🗑️', elem_classes='refresh-button')
+            shared.gradio['delete_chat-cancel'] = gr.Button('Cancel', visible=False, elem_classes='refresh-button')
+            shared.gradio['delete_chat-confirm'] = gr.Button('Confirm', variant='stop', visible=False, elem_classes='refresh-button')
+
         with gr.Row():
             shared.gradio['start_with'] = gr.Textbox(label='Start reply with', placeholder='Sure thing!', value=shared.settings['start_with'])
 
         with gr.Row():
-            shared.gradio['mode'] = gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value=shared.settings['mode'] if shared.settings['mode'] in ['chat', 'instruct', 'chat-instruct'] else 'chat', label='Mode', info='Defines how the chat prompt is generated. In instruct and chat-instruct modes, the instruction template selected under Parameters > Instruction template must match the current model.', elem_id='chat-mode')
+            shared.gradio['mode'] = gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value='chat', label='Mode', info='Defines how the chat prompt is generated. In instruct and chat-instruct modes, the instruction template selected under Parameters > Instruction template must match the current model.', elem_id='chat-mode')
             shared.gradio['chat_style'] = gr.Dropdown(choices=utils.get_available_chat_styles(), label='Chat style', value=shared.settings['chat_style'], visible=shared.settings['mode'] != 'instruct')
@@ -73,15 +81,15 @@ def create_chat_settings_ui():
         with gr.Row():
             with gr.Column(scale=8):
                 with gr.Row():
-                    shared.gradio['character_menu'] = gr.Dropdown(value='None', choices=utils.get_available_characters(), label='Character', elem_id='character-menu', info='Used in chat and chat-instruct modes.', elem_classes='slim-dropdown')
+                    shared.gradio['character_menu'] = gr.Dropdown(value='', choices=utils.get_available_characters(), label='Character', elem_id='character-menu', info='Used in chat and chat-instruct modes.', elem_classes='slim-dropdown')
                     ui.create_refresh_button(shared.gradio['character_menu'], lambda: None, lambda: {'choices': utils.get_available_characters()}, 'refresh-button')
                     shared.gradio['save_character'] = gr.Button('💾', elem_classes='refresh-button')
                     shared.gradio['delete_character'] = gr.Button('🗑️', elem_classes='refresh-button')
 
                 shared.gradio['name1'] = gr.Textbox(value=shared.settings['name1'], lines=1, label='Your name')
-                shared.gradio['name2'] = gr.Textbox(value=shared.settings['name2'], lines=1, label='Character\'s name')
-                shared.gradio['context'] = gr.Textbox(value=shared.settings['context'], lines=10, label='Context', elem_classes=['add_scrollbar'])
-                shared.gradio['greeting'] = gr.Textbox(value=shared.settings['greeting'], lines=5, label='Greeting', elem_classes=['add_scrollbar'])
+                shared.gradio['name2'] = gr.Textbox(value='', lines=1, label='Character\'s name')
+                shared.gradio['context'] = gr.Textbox(value='', lines=10, label='Context', elem_classes=['add_scrollbar'])
+                shared.gradio['greeting'] = gr.Textbox(value='', lines=5, label='Greeting', elem_classes=['add_scrollbar'])
 
             with gr.Column(scale=1):
                 shared.gradio['character_picture'] = gr.Image(label='Character picture', type='pil')
@@ -146,7 +154,7 @@ def create_event_handlers():
         lambda x: (x, ''), gradio('textbox'), gradio('Chat input', 'textbox'), show_progress=False).then(
         chat.generate_chat_reply_wrapper, gradio(inputs), gradio('display', 'history'), show_progress=False).then(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None).then(
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
         lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
 
     shared.gradio['textbox'].submit(
@@ -154,21 +162,21 @@ def create_event_handlers():
         lambda x: (x, ''), gradio('textbox'), gradio('Chat input', 'textbox'), show_progress=False).then(
         chat.generate_chat_reply_wrapper, gradio(inputs), gradio('display', 'history'), show_progress=False).then(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None).then(
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
         lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
 
     shared.gradio['Regenerate'].click(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
         partial(chat.generate_chat_reply_wrapper, regenerate=True), gradio(inputs), gradio('display', 'history'), show_progress=False).then(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None).then(
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
         lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
 
     shared.gradio['Continue'].click(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
         partial(chat.generate_chat_reply_wrapper, _continue=True), gradio(inputs), gradio('display', 'history'), show_progress=False).then(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None).then(
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
         lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
 
     shared.gradio['Impersonate'].click(
@@ -183,60 +191,81 @@ def create_event_handlers():
         chat.replace_last_reply, gradio('textbox', 'interface_state'), gradio('history')).then(
         lambda: '', None, gradio('textbox'), show_progress=False).then(
         chat.redraw_html, gradio(reload_arr), gradio('display')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None)
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None)
 
     shared.gradio['Send dummy message'].click(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
         chat.send_dummy_message, gradio('textbox', 'interface_state'), gradio('history')).then(
         lambda: '', None, gradio('textbox'), show_progress=False).then(
         chat.redraw_html, gradio(reload_arr), gradio('display')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None)
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None)
 
     shared.gradio['Send dummy reply'].click(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
         chat.send_dummy_reply, gradio('textbox', 'interface_state'), gradio('history')).then(
         lambda: '', None, gradio('textbox'), show_progress=False).then(
         chat.redraw_html, gradio(reload_arr), gradio('display')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None)
-
-    shared.gradio['Clear history'].click(lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)], None, gradio(clear_arr))
-    shared.gradio['Clear history-cancel'].click(lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, gradio(clear_arr))
-    shared.gradio['Clear history-confirm'].click(
-        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, gradio(clear_arr)).then(
-        chat.clear_chat_log, gradio('interface_state'), gradio('history')).then(
-        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None)
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None)
 
     shared.gradio['Remove last'].click(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
         chat.remove_last_message, gradio('history'), gradio('textbox', 'history'), show_progress=False).then(
         chat.redraw_html, gradio(reload_arr), gradio('display')).then(
-        chat.save_persistent_history, gradio('history', 'character_menu', 'mode'), None)
-
-    shared.gradio['character_menu'].change(
-        partial(chat.load_character, instruct=False), gradio('character_menu', 'name1', 'name2'), gradio('name1', 'name2', 'character_picture', 'greeting', 'context', 'dummy')).then(
-        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        chat.load_persistent_history, gradio('interface_state'), gradio('history')).then(
-        chat.redraw_html, gradio(reload_arr), gradio('display'))
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None)
 
     shared.gradio['Stop'].click(
         stop_everything_event, None, None, queue=False).then(
         chat.redraw_html, gradio(reload_arr), gradio('display'))
 
+    if not shared.args.multi_user:
+        shared.gradio['unique_id'].select(
+            chat.load_history, gradio('unique_id', 'character_menu', 'mode'), gradio('history')).then(
+            chat.redraw_html, gradio(reload_arr), gradio('display'))
+
+    shared.gradio['Start new chat'].click(
+        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+        chat.start_new_chat, gradio('interface_state'), gradio('history')).then(
+        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
+        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id'))
+
+    shared.gradio['delete_chat'].click(lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)], None, gradio(clear_arr))
+    shared.gradio['delete_chat-cancel'].click(lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, gradio(clear_arr))
+    shared.gradio['delete_chat-confirm'].click(
+        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+        chat.delete_history, gradio('unique_id', 'character_menu', 'mode'), None).then(
+        chat.load_latest_history, gradio('interface_state'), gradio('history')).then(
+        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
+        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id')).then(
+        lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, gradio(clear_arr))
+
+    shared.gradio['load_chat_history'].upload(
+        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+        chat.start_new_chat, gradio('interface_state'), gradio('history')).then(
+        chat.load_history_json, gradio('load_chat_history', 'history'), gradio('history')).then(
+        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
+        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id')).then(
+        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
+        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_chat()}}')
+
+    shared.gradio['character_menu'].change(
+        partial(chat.load_character, instruct=False), gradio('character_menu', 'name1', 'name2'), gradio('name1', 'name2', 'character_picture', 'greeting', 'context', 'dummy')).success(
+        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+        chat.load_latest_history, gradio('interface_state'), gradio('history')).then(
+        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
+        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id'))
+
     shared.gradio['mode'].change(
         lambda x: gr.update(visible=x != 'instruct'), gradio('mode'), gradio('chat_style'), show_progress=False).then(
-        chat.redraw_html, gradio(reload_arr), gradio('display'))
+        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
+        partial(chat.character_is_loaded, raise_exception=True), gradio('interface_state'), None).success(
+        chat.load_latest_history, gradio('interface_state'), gradio('history')).then(
+        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
+        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id'))
 
     shared.gradio['chat_style'].change(chat.redraw_html, gradio(reload_arr), gradio('display'))
     shared.gradio['instruction_template'].change(
         partial(chat.load_character, instruct=True), gradio('instruction_template', 'name1_instruct', 'name2_instruct'), gradio('name1_instruct', 'name2_instruct', 'dummy', 'dummy', 'context_instruct', 'turn_template'))
 
-    shared.gradio['load_chat_history'].upload(
-        chat.load_history, gradio('load_chat_history', 'history'), gradio('history')).then(
-        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_chat()}}')
-
     shared.gradio['Copy last reply'].click(chat.send_last_reply_to_input, gradio('history'), gradio('textbox'), show_progress=False)
 
     # Save/delete a character
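
Several of the chains above end with the same inline lambda that repopulates the "Past chats" dropdown; the walrus operator lets a single find_all_histories call supply both the dropdown choices and the newest id as the selected value. A standalone sketch of that pattern:

import gradio as gr

from modules import chat

def refresh_past_chats(interface_state):
    # Equivalent of the inline lambda: one call feeds both `choices` and `value`.
    histories = chat.find_all_histories(interface_state)
    return gr.update(choices=histories, value=histories[0])

In --multi-user mode, find_all_histories returns [''] and save_history returns early, so the dropdown stays effectively empty and no per-user history is written to disk.
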

========================= changed file =========================

@@ -1,9 +1,6 @@
-import copy
-import json
-
 import gradio as gr
 
-from modules import chat, presets, shared, ui, ui_chat, utils
+from modules import chat, presets, shared, ui, utils
 from modules.utils import gradio
@@ -76,35 +73,3 @@ def create_event_handlers():
         lambda x: f'{x}.yaml', gradio('preset_menu'), gradio('delete_filename')).then(
         lambda: 'presets/', None, gradio('delete_root')).then(
         lambda: gr.update(visible=True), None, gradio('file_deleter'))
-
-    if not shared.args.multi_user:
-        shared.gradio['save_session'].click(
-            ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-            save_session, gradio('interface_state'), gradio('temporary_text')).then(
-            None, gradio('temporary_text'), None, _js=f"(contents) => {{{ui.save_files_js}; saveSession(contents)}}")
-
-        shared.gradio['load_session'].upload(
-            ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-            load_session, gradio('load_session', 'interface_state'), gradio('interface_state')).then(
-            ui.apply_interface_values, gradio('interface_state'), gradio(ui.list_interface_input_elements()), show_progress=False).then(
-            chat.redraw_html, gradio(ui_chat.reload_arr), gradio('display')).then(
-            None, None, None, _js='() => {alert("The session has been loaded.")}')
-
-
-def load_session(file, state):
-    decoded_file = file if isinstance(file, str) else file.decode('utf-8')
-    data = json.loads(decoded_file)
-    if 'character_menu' in data and state.get('character_menu') != data.get('character_menu'):
-        shared.session_is_loading = True
-
-    state.update(data)
-    return state
-
-
-def save_session(state):
-    output = copy.deepcopy(state)
-    for key in ['prompt_menu-default', 'prompt_menu-notebook']:
-        del output[key]
-
-    return json.dumps(output, indent=4)

========================= changed file =========================

@@ -22,10 +22,6 @@ def create_ui():
             shared.gradio['bool_menu'] = gr.CheckboxGroup(choices=get_boolean_arguments(), value=get_boolean_arguments(active=True), label="Boolean command-line flags", elem_classes='checkboxgroup-table')
 
         with gr.Column():
-            if not shared.args.multi_user:
-                shared.gradio['save_session'] = gr.Button('Save session')
-                shared.gradio['load_session'] = gr.File(type='binary', file_types=['.json'], label="Upload Session JSON")
-
             extension_name = gr.Textbox(lines=1, label='Install or update an extension', info='Enter the GitHub URL below and press Enter. For a list of extensions, see: https://github.com/oobabooga/text-generation-webui-extensions ⚠️ WARNING ⚠️ : extensions can execute arbitrary code. Make sure to inspect their source code before activating them.')
             extension_status = gr.Markdown()

========================= changed file =========================

@@ -94,7 +94,7 @@ def get_available_prompts():
 
 def get_available_characters():
     paths = (x for x in Path('characters').iterdir() if x.suffix in ('.json', '.yaml', '.yml'))
-    return ['None'] + sorted(set((k.stem for k in paths)), key=natural_keys)
+    return sorted(set((k.stem for k in paths)), key=natural_keys)
 
 
 def get_available_instruction_templates():

========================= changed file =========================

@@ -3,7 +3,6 @@ show_controls: true
 start_with: ''
 mode: chat
 chat_style: cai-chat
-character: None
 prompt-default: QA
 prompt-notebook: QA
 preset: simple-1
@@ -24,9 +23,7 @@ add_bos_token: true
 skip_special_tokens: true
 stream: true
 name1: You
-name2: Assistant
-context: This is a conversation with your Assistant. It is a computer program designed to help you with various tasks such as answering questions, providing recommendations, and helping with decision making. You can ask it anything you want and it will do its best to give you accurate and relevant information.
-greeting: ''
+character: Assistant
 instruction_template: Alpaca
 chat-instruct_command: |-
   Continue the chat dialogue below. Write a single reply for the character "<|character|>".