text-generation-webui/modules/ui.py

246 lines
6.2 KiB
Python
Raw Normal View History

import copy
2023-03-15 16:33:26 +01:00
from pathlib import Path
import gradio as gr
import torch
import yaml
2023-04-12 15:27:06 +02:00
from modules import shared
2023-03-15 16:33:26 +01:00
def _read_asset(relative_path):
    """Read a UI asset file located relative to this module, decoded as UTF-8.

    Resolving against __file__ makes the lookup independent of the current
    working directory. An explicit encoding is required: the CSS/JS files
    contain non-ASCII characters, and the platform default codec (e.g.
    cp1252 on Windows) would fail or garble them.
    """
    return (Path(__file__).resolve().parent / relative_path).read_text(encoding='utf-8')


# Stylesheet and JavaScript snippets injected into the gradio page.
css = _read_asset('../css/main.css')
js = _read_asset('../js/main.js')
save_files_js = _read_asset('../js/save_files.js')
switch_tabs_js = _read_asset('../js/switch_tabs.js')
show_controls_js = _read_asset('../js/show_controls.js')

# Emoji used as labels for the small utility buttons next to dropdowns.
refresh_symbol = '🔄'
delete_symbol = '🗑️'
save_symbol = '💾'

# Light theme: gradio's default with custom fonts and subdued colors.
theme = gr.themes.Default(
    font=['Helvetica', 'ui-sans-serif', 'system-ui', 'sans-serif'],
    font_mono=['IBM Plex Mono', 'ui-monospace', 'Consolas', 'monospace'],
).set(
    border_color_primary='#c5c5d2',
    button_large_padding='6px 12px',
    body_text_color_subdued='#484848',
    background_fill_secondary='#eaeaea'
)

# JS that plays a notification sound when generation finishes, enabled only
# if the user dropped a notification.mp3 into the working directory.
if Path("notification.mp3").exists():
    audio_notification_js = "document.querySelector('#audio_notification audio')?.play();"
else:
    audio_notification_js = ""
def list_model_elements():
    """Return the names of all model-loading UI elements.

    The fixed list covers loader selection, quantization, llama.cpp and
    exllama options; one 'gpu_memory_{i}' entry is appended per CUDA
    device visible to torch.
    """
    elements = [
        'loader',
        'cpu_memory',
        'auto_devices',
        'disk',
        'cpu',
        'bf16',
        'load_in_8bit',
        'trust_remote_code',
        'load_in_4bit',
        'compute_dtype',
        'quant_type',
        'use_double_quant',
        'wbits',
        'groupsize',
        'model_type',
        'pre_layer',
        'triton',
        'desc_act',
        'no_inject_fused_attention',
        'no_inject_fused_mlp',
        'no_use_cuda_fp16',
        'disable_exllama',
        'cfg_cache',
        'threads',
        'n_batch',
        'no_mmap',
        'low_vram',
        'mlock',
        'mul_mat_q',
        'n_gpu_layers',
        'tensor_split',
        'n_ctx',
        'n_gqa',
        'rms_norm_eps',
        'llama_cpp_seed',
        'gpu_split',
        'max_seq_len',
        'compress_pos_emb',
        'alpha_value',
        'rope_freq_base',
    ]

    # One per-device memory slider for each visible GPU.
    elements += [f'gpu_memory_{i}' for i in range(torch.cuda.device_count())]
    return elements
def list_interface_input_elements():
    """Return the names of every UI input element that makes up the
    interface state, grouped as: generation parameters, chat elements,
    notebook/default-tab elements, then model-loading elements.
    """
    generation = [
        'max_new_tokens',
        'auto_max_new_tokens',
        'seed',
        'temperature',
        'top_p',
        'top_k',
        'typical_p',
        'epsilon_cutoff',
        'eta_cutoff',
        'repetition_penalty',
        'repetition_penalty_range',
        'encoder_repetition_penalty',
        'no_repeat_ngram_size',
        'min_length',
        'do_sample',
        'penalty_alpha',
        'num_beams',
        'length_penalty',
        'early_stopping',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'negative_prompt',
        'guidance_scale',
        'add_bos_token',
        'ban_eos_token',
        'truncation_length',
        'custom_stopping_strings',
        'skip_special_tokens',
        'stream',
        'tfs',
        'top_a',
    ]

    chat = [
        'textbox',
        'start_with',
        'character_menu',
        'history',
        'name1',
        'name2',
        'greeting',
        'context',
        'mode',
        'instruction_template',
        'name1_instruct',
        'name2_instruct',
        'context_instruct',
        'turn_template',
        'chat_style',
        'chat-instruct_command',
    ]

    notebook = [
        'textbox-notebook',
        'textbox-default',
        'output_textbox',
        'prompt_menu-default',
        'prompt_menu-notebook',
    ]

    return generation + chat + notebook + list_model_elements()
def gather_interface_values(*args):
    """Pack positional UI values into a dict keyed by element name.

    Values must arrive in the exact order produced by
    list_interface_input_elements().
    """
    elements = list_interface_input_elements()
    output = {elements[i]: args[i] for i in range(len(elements))}

    # In single-user mode, remember the state so it can be restored later.
    if not shared.args.multi_user:
        shared.persistent_interface_state = output

    return output
def apply_interface_values(state, use_persistent=False):
    """Produce one value (or no-op update) per interface element from `state`.

    With use_persistent=True, the saved shared.persistent_interface_state
    is applied instead of the given `state`.
    """
    if use_persistent:
        state = shared.persistent_interface_state

    elements = list_interface_input_elements()
    if not state:
        # Nothing saved yet: emit a no-op update for every element.
        return [gr.update() for _ in elements]

    return [state[name] if name in state else gr.update() for name in elements]
def save_settings(state, preset, instruction_template, extensions, show_controls):
    """Serialize the current UI state as a YAML string (settings file format).

    Starts from a deep copy of shared.settings, overlays matching keys from
    `state` (except per-character fields, which are saved with the character
    instead), then records the explicitly-passed selections.
    """
    output = copy.deepcopy(shared.settings)
    excluded = ['name1', 'name2', 'greeting', 'context', 'turn_template']
    for key in state:
        if key in shared.settings and key not in excluded:
            output[key] = state[key]

    output.update({
        'preset': preset,
        'prompt-default': state['prompt_menu-default'],
        'prompt-notebook': state['prompt_menu-notebook'],
        'character': state['character_menu'],
        'instruction_template': instruction_template,
        'default_extensions': extensions,
        'seed': int(output['seed']),
        'show_controls': show_controls,
    })

    return yaml.dump(output, sort_keys=False, width=float("inf"))
class ToolButton(gr.Button, gr.components.IOComponent):
    """
    Small button with single emoji as text, fits inside gradio forms
    Copied from https://github.com/AUTOMATIC1111/stable-diffusion-webui
    """

    # NOTE: the original defined an __init__ that only forwarded **kwargs to
    # super().__init__ — a redundant override, removed here; construction
    # behavior is unchanged.

    def get_block_name(self):
        return "button"
def create_refresh_button(refresh_component, refresh_method, refreshed_args, elem_class):
    """
    Copied from https://github.com/AUTOMATIC1111/stable-diffusion-webui
    """
    def refresh():
        # Re-run the data source, push the fresh attributes onto the
        # component, and return a matching gradio update for it.
        refresh_method()
        updated = refreshed_args() if callable(refreshed_args) else refreshed_args
        for attr, value in updated.items():
            setattr(refresh_component, attr, value)

        return gr.update(**(updated or {}))

    button = ToolButton(value=refresh_symbol, elem_classes=elem_class)
    button.click(fn=refresh, inputs=[], outputs=[refresh_component])
    return button
def create_delete_button(**kwargs):
    """Return a ToolButton labeled with the delete (trash) emoji."""
    button = ToolButton(value=delete_symbol, **kwargs)
    return button
def create_save_button(**kwargs):
    """Return a ToolButton labeled with the save (floppy disk) emoji."""
    button = ToolButton(value=save_symbol, **kwargs)
    return button