mirror of https://github.com/oobabooga/text-generation-webui.git
synced 2024-11-21 23:57:58 +01:00

Add --character flag, add character to settings.json

This commit is contained in:
parent 0c32ae27cc
commit b6af2e56a2
README.md
@@ -188,6 +188,7 @@ Optionally, you can use the following command-line flags:
 | `-h`, `--help` | Show this help message and exit. |
 | `--notebook` | Launch the web UI in notebook mode, where the output is written to the same text box as the input. |
 | `--chat` | Launch the web UI in chat mode. |
+| `--character CHARACTER` | The name of the character to load in chat mode by default. |
 | `--model MODEL` | Name of the model to load by default. |
 | `--lora LORA` | Name of the LoRA to apply to the model by default. |
 | `--model-dir MODEL_DIR` | Path to directory with all the models. |
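With the new flag documented above, chat mode can be launched with a character preselected, e.g. `python server.py --chat --character Example` (where `Example` is a hypothetical character name standing in for any installed character).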
modules/shared.py
@@ -34,6 +34,7 @@ settings = {
     'max_new_tokens_min': 1,
     'max_new_tokens_max': 2000,
     'seed': -1,
+    'character': 'None',
     'name1': 'You',
     'name2': 'Assistant',
     'context': 'This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.',
@@ -93,6 +94,7 @@ parser = argparse.ArgumentParser(formatter_class=lambda prog: argparse.HelpForma
 parser.add_argument('--notebook', action='store_true', help='Launch the web UI in notebook mode, where the output is written to the same text box as the input.')
 parser.add_argument('--chat', action='store_true', help='Launch the web UI in chat mode with a style similar to the Character.AI website.')
 parser.add_argument('--cai-chat', action='store_true', help='DEPRECATED: use --chat instead.')
+parser.add_argument('--character', type=str, help='The name of the character to load in chat mode by default.')
 parser.add_argument('--model', type=str, help='Name of the model to load by default.')
 parser.add_argument('--lora', type=str, help='Name of the LoRA to apply to the model by default.')
 parser.add_argument("--model-dir", type=str, default='models/', help="Path to directory with all the models")
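Together with the `'character': 'None'` default added to the settings dict above, this gives two sources for the startup character, with the command-line flag taking precedence via the `shared.args.character or shared.settings['character']` expression used later in this commit. A minimal, self-contained sketch of that precedence pattern; the names here are illustrative, not the project's actual module layout:

```python
import argparse

# Default analogous to the 'character' entry added to the settings dict.
settings = {'character': 'None'}

parser = argparse.ArgumentParser()
parser.add_argument('--character', type=str, help='The name of the character to load by default.')
args = parser.parse_args()

# The flag wins when supplied; otherwise fall back to the settings default.
# (args.character is None when the flag is omitted, so `or` picks the fallback.)
character = args.character or settings['character']
print(f'Startup character: {character}')
```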
modules/ui.py
@@ -60,11 +60,11 @@ def apply_interface_values(state, use_persistent=False):
     else:
         if use_persistent and 'mode' in state:
             if state['mode'] == 'instruct':
-                return [state[k] if k not in ['character_menu'] else gr.update() for k in elements]
+                return [state[k] if (k not in ['character_menu'] and k in state) else gr.update() for k in elements]
             else:
-                return [state[k] if k not in ['instruction_template'] else gr.update() for k in elements]
+                return [state[k] if (k not in ['instruction_template'] and k in state) else gr.update() for k in elements]
         else:
-            return [state[k] for k in elements]
+            return [state[k] if k in state else gr.update() for k in elements]


 class ToolButton(gr.Button, gr.components.FormComponent):
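The added `and k in state` guards let elements that have no saved value fall back to `gr.update()` (gradio's "leave the component unchanged" value) instead of raising a `KeyError` when the persistent state is only partially populated. A gradio-free sketch of the same fallback pattern, with a `NO_CHANGE` sentinel standing in for `gr.update()` and made-up element names:

```python
# Sentinel standing in for gr.update(): "leave this widget as it is".
NO_CHANGE = object()

elements = ['mode', 'character_menu', 'instruction_template', 'seed']
state = {'mode': 'chat', 'character_menu': 'None'}  # partial state; 'seed' is missing

# Without the `k in state` guard, state['seed'] would raise KeyError.
values = [state[k] if (k not in ['instruction_template'] and k in state) else NO_CHANGE
          for k in elements]

for name, value in zip(elements, values):
    print(name, '->', 'unchanged' if value is NO_CHANGE else value)
```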
server.py
@@ -544,7 +544,7 @@ def create_interface():
             shared.gradio['Clear history-cancel'] = gr.Button('Cancel', visible=False)

         shared.gradio['mode'] = gr.Radio(choices=['cai-chat', 'chat', 'instruct'], value=shared.settings['mode'], label='Mode')
-        shared.gradio['instruction_template'] = gr.Dropdown(choices=get_available_instruction_templates(), label='Instruction template', value=shared.settings['instruction_template'], visible=shared.settings['mode'] == 'instruct', info='Change this according to the model/LoRA that you are using.')
+        shared.gradio['instruction_template'] = gr.Dropdown(choices=get_available_instruction_templates(), label='Instruction template', value='None', visible=shared.settings['mode'] == 'instruct', info='Change this according to the model/LoRA that you are using.')

         with gr.Tab('Character', elem_id='chat-settings'):
             with gr.Row():
@@ -560,7 +560,7 @@ def create_interface():
             shared.gradio['your_picture'] = gr.Image(label='Your picture', type='pil', value=Image.open(Path('cache/pfp_me.png')) if Path('cache/pfp_me.png').exists() else None)

         with gr.Row():
-            shared.gradio['character_menu'] = gr.Dropdown(choices=get_available_characters(), value='None', label='Character', elem_id='character-menu')
+            shared.gradio['character_menu'] = gr.Dropdown(choices=get_available_characters(), label='Character', elem_id='character-menu')
             ui.create_refresh_button(shared.gradio['character_menu'], lambda: None, lambda: {'choices': get_available_characters()}, 'refresh-button')

         with gr.Row():
@@ -794,11 +794,7 @@ def create_interface():
         shared.gradio['character_menu'].change(chat.load_character, [shared.gradio[k] for k in ['character_menu', 'name1', 'name2', 'mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
         shared.gradio['upload_img_tavern'].upload(chat.upload_tavern_character, [shared.gradio['upload_img_tavern'], shared.gradio['name1'], shared.gradio['name2']], [shared.gradio['character_menu']])
         shared.gradio['your_picture'].change(chat.upload_your_profile_picture, [shared.gradio[k] for k in ['your_picture', 'name1', 'name2', 'mode']], shared.gradio['display'])

         shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js+ui.chat_js}}}")
-        shared.gradio['interface'].load(chat.load_character, [shared.gradio[k] for k in ['instruction_template', 'name1', 'name2', 'mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
-        shared.gradio['interface'].load(chat.load_default_history, [shared.gradio[k] for k in ['name1', 'name2']], None)
-        shared.gradio['interface'].load(chat.redraw_html, reload_inputs, shared.gradio['display'], show_progress=True)
-
     # notebook/default modes event handlers
     else:
@@ -919,6 +915,14 @@ if __name__ == "__main__":
     if shared.args.lora:
         add_lora_to_model([shared.args.lora])

+    # Force a character to be loaded
+    if shared.is_chat():
+        shared.persistent_interface_state.update({
+            'mode': shared.settings['mode'],
+            'character_menu': shared.args.character or shared.settings['character'],
+            'instruction_template': shared.settings['instruction_template']
+        })
+
     # Launch the web UI
     create_interface()
     while True:
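The new block seeds `shared.persistent_interface_state` before `create_interface()` runs, so the startup mode, character, and instruction template come from one place (settings plus the optional `--character` override) rather than being hard-coded into individual widgets. A simplified, gradio-free sketch of that ordering; every name below is a stand-in, not the real server code:

```python
# Stand-in for shared.persistent_interface_state.
persistent_interface_state = {}

settings = {'mode': 'cai-chat', 'character': 'None', 'instruction_template': 'None'}
args_character = 'Example'  # hypothetical value a --character flag might have supplied

def create_interface():
    # The real server builds gradio widgets here; this just shows the state
    # being read after it has been seeded.
    elements = ['mode', 'character_menu', 'instruction_template']
    applied = {k: persistent_interface_state.get(k, '<unchanged>') for k in elements}
    print('interface loaded with:', applied)

# Seed the state first, then build the UI: the ordering the commit relies on.
persistent_interface_state.update({
    'mode': settings['mode'],
    'character_menu': args_character or settings['character'],
    'instruction_template': settings['instruction_template'],
})
create_interface()
```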
@@ -3,6 +3,7 @@
     "max_new_tokens_min": 1,
     "max_new_tokens_max": 2000,
     "seed": -1,
+    "character": "None",
     "name1": "You",
     "name2": "Assistant",
     "context": "This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.",
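The same key is added to the shipped settings template so that a user-supplied settings file can override the built-in default. A small illustrative sketch of the usual overlay pattern (load the JSON, update the defaults dict); the file name and keys are examples, not the project's exact loading code:

```python
import json
from pathlib import Path

# Built-in defaults, as in the settings dict added to shared.py above.
settings = {'character': 'None', 'name1': 'You', 'name2': 'Assistant'}

settings_file = Path('settings.json')  # hypothetical user-provided file
if settings_file.exists():
    user_settings = json.loads(settings_file.read_text())
    settings.update(user_settings)  # user values replace defaults key by key

print(settings['character'])
```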