Mirror of https://github.com/oobabooga/text-generation-webui.git
Add a button for rendering markdown for any model
commit 37d4ad012b
parent 9a43656a50
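
The idea of the change: instead of pushing a Markdown rendering of GALACTICA output on every generation step, the Markdown tab gets a Render button that converts whatever is currently in the textbox, for any model. Below is a minimal standalone sketch of that pattern; the markdown_render, markdown and textbox names mirror the component keys used in server.py, while demo and the layout around it are illustrative only.

import gradio as gr

with gr.Blocks() as demo:
    with gr.Tab('Raw'):
        textbox = gr.Textbox(lines=27)

    with gr.Tab('Markdown'):
        markdown_render = gr.Button('Render')
        markdown = gr.Markdown()

    # Clicking Render copies the current textbox contents into the Markdown
    # component, which re-renders them; queue=False keeps this cheap UI update
    # out of the generation queue.
    markdown_render.click(lambda x: x, textbox, markdown, queue=False)

demo.launch()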
README.md
@@ -15,7 +15,7 @@ Its goal is to become the [AUTOMATIC1111/stable-diffusion-webui](https://github.
 * Chat mode for conversation and role-playing
 * Instruct mode compatible with various formats, including Alpaca, Vicuna, Open Assistant, Dolly, Koala, ChatGLM, MOSS, RWKV-Raven, Galactica, StableLM, WizardLM, Baize, Ziya, Chinese-Vicuna, MPT, INCITE, Wizard Mega, KoAlpaca, Vigogne, Bactrian, h2o, and OpenBuddy
 * [Multimodal pipelines, including LLaVA and MiniGPT-4](https://github.com/oobabooga/text-generation-webui/tree/main/extensions/multimodal)
-* Markdown output for [GALACTICA](https://github.com/paperswithcode/galai), including LaTeX rendering
+* Markdown output with LaTeX rendering, to use for instance with [GALACTICA](https://github.com/paperswithcode/galai)
 * Nice HTML output for GPT-4chan
 * [Custom chat characters](docs/Chat-mode.md)
 * Advanced chat features (send images, get audio responses with TTS)
modules/text_generation.py
@@ -130,14 +130,11 @@ def get_reply_from_output_ids(output_ids, input_ids, original_question, state, i
 
 
 def formatted_outputs(reply, model_name):
-    if shared.model_type == 'galactica':
-        reply = fix_galactica(reply)
-        return reply, reply, generate_basic_html(reply)
-    elif shared.model_type == 'gpt4chan':
+    if shared.model_type == 'gpt4chan':
         reply = fix_gpt4chan(reply)
-        return reply, 'Only applicable for GALACTICA models.', generate_4chan_html(reply)
+        return reply, generate_4chan_html(reply)
     else:
-        return reply, 'Only applicable for GALACTICA models.', generate_basic_html(reply)
+        return reply, generate_basic_html(reply)
 
 
 def set_manual_seed(seed):
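
The return value shrinks from three elements to two: the middle element used to feed the Markdown tab on every generation step, and for anything other than GALACTICA it only ever carried a placeholder string. A rough before/after of the tuple shape, with illustrative literals rather than real webui output:

# Before: (text, markdown, html). The markdown slot held the GALACTICA-fixed
# text or the placeholder below, and was pushed to the Markdown tab on every
# streaming update.
before = ("generated text", "Only applicable for GALACTICA models.", "<div>generated text</div>")

# After: (text, html). The Markdown tab is rendered on demand instead.
after = ("generated text", "<div>generated text</div>")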
server.py
@@ -709,6 +709,7 @@ def create_interface():
                 shared.gradio['textbox'] = gr.Textbox(value=default_text, elem_classes="textbox", lines=27)
 
             with gr.Tab('Markdown'):
+                shared.gradio['markdown_render'] = gr.Button('Render')
                 shared.gradio['markdown'] = gr.Markdown()
 
             with gr.Tab('HTML'):
@@ -769,6 +770,7 @@ def create_interface():
                 shared.gradio['output_textbox'] = gr.Textbox(elem_classes="textbox_default_output", lines=27, label='Output')
 
             with gr.Tab('Markdown'):
+                shared.gradio['markdown_render'] = gr.Button('Render')
                 shared.gradio['markdown'] = gr.Markdown()
 
             with gr.Tab('HTML'):
@@ -944,9 +946,9 @@ def create_interface():
     else:
         shared.input_params = [shared.gradio[k] for k in ['textbox', 'interface_state']]
         if shared.args.notebook:
-            output_params = [shared.gradio[k] for k in ['textbox', 'markdown', 'html']]
+            output_params = [shared.gradio[k] for k in ['textbox', 'html']]
         else:
-            output_params = [shared.gradio[k] for k in ['output_textbox', 'markdown', 'html']]
+            output_params = [shared.gradio[k] for k in ['output_textbox', 'html']]
 
         gen_events.append(shared.gradio['Generate'].click(
             lambda x: x, shared.gradio['textbox'], shared.gradio['last_input']).then(
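
Gradio maps the tuple returned (or yielded) by an event function onto its outputs list position for position, so output_params has to shrink in lockstep with the pair that formatted_outputs now returns. A small standalone illustration of that mapping, assuming Gradio Blocks; fake_generate and these component names are made up for the example:

import gradio as gr

def fake_generate(prompt):
    # Stand-in for generate_reply_wrapper: returns (text, html), the same
    # shape formatted_outputs now produces after dropping the markdown slot.
    text = prompt.upper()
    return text, f"<pre>{text}</pre>"

with gr.Blocks() as demo:
    textbox = gr.Textbox(label='textbox')
    html = gr.HTML()
    generate = gr.Button('Generate')

    # The two returned values land on [textbox, html] in order; a third
    # Markdown component listed here would no longer get a matching value.
    generate.click(fake_generate, textbox, [textbox, html])

demo.launch()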
@@ -966,6 +968,7 @@ def create_interface():
 
         if shared.args.notebook:
             shared.gradio['Undo'].click(lambda x: x, shared.gradio['last_input'], shared.gradio['textbox'], show_progress=False)
+            shared.gradio['markdown_render'].click(lambda x: x, shared.gradio['textbox'], shared.gradio['markdown'], queue=False)
             gen_events.append(shared.gradio['Regenerate'].click(
                 lambda x: x, shared.gradio['last_input'], shared.gradio['textbox'], show_progress=False).then(
                 ui.gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
@@ -974,6 +977,7 @@ def create_interface():
                 # lambda: None, None, None, _js="() => {element = document.getElementsByTagName('textarea')[0]; element.scrollTop = element.scrollHeight}")
             )
         else:
+            shared.gradio['markdown_render'].click(lambda x: x, shared.gradio['output_textbox'], shared.gradio['markdown'], queue=False)
             gen_events.append(shared.gradio['Continue'].click(
                 ui.gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
                 generate_reply_wrapper, [shared.gradio['output_textbox']] + shared.input_params[1:], output_params, show_progress=False).then(
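
The two new click handlers differ only in their source: notebook mode renders the contents of 'textbox', the default mode renders 'output_textbox', and each feeds the 'markdown' component of its interface. queue=False lets the render click run directly instead of waiting behind queued generation events.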