Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-11-25 17:29:22 +01:00.
Commit 3b8cecbab7: Reload the default chat on page refresh
Parent: f1914115d3
@@ -300,6 +300,10 @@ def load_history(file, name1, name2):
         shared.history['internal'] = tokenize_dialogue(file, name1, name2)
         shared.history['visible'] = copy.deepcopy(shared.history['internal'])
 
+def load_default_history(name1, name2):
+    if Path(f'logs/persistent.json').exists():
+        load_history(open(Path(f'logs/persistent.json'), 'rb').read(), name1, name2)
+
 def load_character(_character, name1, name2):
     context = ""
     shared.history['internal'] = []
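The hunk above adds a small helper to the chat module: load_default_history restores logs/persistent.json through load_history when that file exists, and otherwise leaves the default in-memory history untouched. A minimal sketch of that control flow, with a stub standing in for the real load_history:

# Sketch of the new helper's control flow; load_history here is a stub
# standing in for the real chat.load_history.
from pathlib import Path

def load_history(raw_bytes, name1, name2):
    # Stub: the real function parses the log and fills shared.history.
    print(f"restoring {len(raw_bytes)} bytes of chat history for {name1}/{name2}")

def load_default_history(name1, name2):
    log_path = Path('logs/persistent.json')
    if log_path.exists():
        load_history(log_path.read_bytes(), name1, name2)
    # else: leave the default (empty) history untouched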
server.py: 15 changed lines
@@ -187,8 +187,7 @@ else:
 
 if shared.args.chat or shared.args.cai_chat:
     with gr.Blocks(css=ui.css+ui.chat_css, analytics_enabled=False) as interface:
-        if Path(f'logs/persistent.json').exists():
-            chat.load_history(open(Path(f'logs/persistent.json'), 'rb').read(), shared.settings[f'name1{suffix}'], shared.settings[f'name2{suffix}'])
+        interface.load(lambda : chat.load_default_history(shared.settings[f'name1{suffix}'], shared.settings[f'name2{suffix}']), None, None)
         if shared.args.cai_chat:
             display = gr.HTML(value=generate_chat_html(shared.history['visible'], shared.settings[f'name1{suffix}'], shared.settings[f'name2{suffix}'], shared.character))
         else:
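This hunk replaces a one-time restore that ran while server.py was being imported with a Gradio load event, so the persisted chat is re-applied every time the browser page is opened or refreshed rather than only once at server start. A minimal sketch of that mechanism, with illustrative names (counter, on_page_load) that are not part of the project:

# A callback registered with Blocks.load() runs on every page load/refresh,
# unlike module-level code, which runs once when the server starts.
import gradio as gr

counter = {'loads': 0}

def on_page_load():
    counter['loads'] += 1
    return f"Page loaded {counter['loads']} time(s)"

with gr.Blocks() as demo:
    status = gr.Textbox(label="status")
    demo.load(on_page_load, None, status)  # fires again on each refresh

demo.launch()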
@@ -290,12 +289,12 @@ if shared.args.chat or shared.args.cai_chat:
         upload_img_me.upload(chat.upload_your_profile_picture, [upload_img_me], [])
         if shared.args.picture:
             picture_select.upload(lambda : None, [], [picture_select], show_progress=False)
-        if shared.args.cai_chat:
-            upload_chat_history.upload(chat.redraw_html, [name1, name2], [display])
-            upload_img_me.upload(chat.redraw_html, [name1, name2], [display])
-        else:
-            upload_chat_history.upload(lambda : shared.history['visible'], [], [display])
-            upload_img_me.upload(lambda : shared.history['visible'], [], [display])
+        reload_func = chat.redraw_html if shared.args.cai_chat else lambda : shared.history['visible']
+        reload_inputs = [name1, name2] if shared.args.cai_chat else []
+        upload_chat_history.upload(reload_func, reload_inputs, [display])
+        upload_img_me.upload(reload_func, reload_inputs, [display])
+        interface.load(reload_func, reload_inputs, [display], show_progress=False)
 
 elif shared.args.notebook:
     with gr.Blocks(css=ui.css, analytics_enabled=False) as interface:
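The hunk above collapses the duplicated cai_chat/plain wiring: the redraw function and its inputs are chosen once (reload_func, reload_inputs) and the same pair is bound to every event that should refresh the chat display, including a new interface.load event so the display is also redrawn on page refresh. A condensed sketch of that pattern, using stand-in components and callbacks (cai_mode, redraw_html, plain_history) rather than the real ones from server.py:

# "Choose once, wire everywhere": the same callback/inputs pair drives both
# the upload event and the page-load event.
import gradio as gr

cai_mode = True
visible_history = [["Hi", "Hello!"]]

def redraw_html(name1, name2):
    return f"<p>{name1}/{name2}: {len(visible_history)} exchange(s)</p>"

def plain_history():
    return str(visible_history)

with gr.Blocks() as demo:
    name1 = gr.Textbox(value="You")
    name2 = gr.Textbox(value="Bot")
    display = gr.HTML() if cai_mode else gr.Textbox()
    upload_chat_history = gr.File()

    reload_func = redraw_html if cai_mode else plain_history
    reload_inputs = [name1, name2] if cai_mode else []

    upload_chat_history.upload(reload_func, reload_inputs, [display])
    demo.load(reload_func, reload_inputs, [display], show_progress=False)

demo.launch()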