Fix start_with

Author: oobabooga
Date:   2023-07-03 23:32:02 -07:00
Parent: 55457549cd
Commit: 463ddfffd0
2 changed files with 4 additions and 3 deletions


@@ -10,6 +10,7 @@ import requests
 import torch
 from PIL import Image
+from modules import shared
 from modules.models import reload_model, unload_model
 from modules.ui import create_refresh_button
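
Note: the only change in this file is the new import, which suggests the script now reads process-wide state from modules.shared. As a rough, hypothetical illustration (not part of this commit), such state is typically consulted like this:

    from modules import shared

    # modules.shared holds global state such as the currently loaded model.
    # This usage is an assumption for illustration; the diff above only adds the import.
    if shared.model is not None:
        print(f"Model currently loaded: {shared.model_name}")
    else:
        print("No model is loaded.")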


@@ -294,12 +294,12 @@ def generate_chat_reply(text, state, regenerate=False, _continue=False, loading_
 def generate_chat_reply_wrapper(text, start_with, state, regenerate=False, _continue=False):
     if start_with != '' and not _continue:
         if regenerate:
-            text = remove_last_message()
+            text, state['history'] = remove_last_message(state['history'])
             regenerate = False

         _continue = True
-        send_dummy_message(text)
-        send_dummy_reply(start_with)
+        send_dummy_message(text, state)
+        send_dummy_reply(start_with, state)

     for i, history in enumerate(generate_chat_reply(text, state, regenerate, _continue, loading_message=True)):
         yield chat_html_wrapper(history, state['name1'], state['name2'], state['mode'], state['chat_style']), history
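
The fix threads the chat history through the state dict explicitly instead of calling the old argument-free helper forms. A minimal sketch of the helper shapes implied by the new call sites (names taken from the diff; bodies are simplified assumptions, not the project's actual implementations):

    # Assumed history layout: {'visible': [[user, bot], ...], 'internal': [[user, bot], ...]}

    def remove_last_message(history):
        # Pop the most recent exchange and hand back its user text plus the updated history.
        last = history['visible'].pop()
        history['internal'].pop()
        return last[0], history

    def send_dummy_message(text, state):
        # Append the user's message to the history stored inside state.
        state['history']['visible'].append([text, ''])
        state['history']['internal'].append([text, ''])
        return state['history']

    def send_dummy_reply(start_with, state):
        # Append a bot reply stub beginning with start_with, which generation then continues.
        state['history']['visible'].append(['', start_with])
        state['history']['internal'].append(['', start_with])
        return state['history']

With these signatures, generate_chat_reply_wrapper turns a "start with" request into a continue operation: it appends the user message and a partial bot reply, then calls generate_chat_reply with _continue=True so the model extends start_with rather than beginning a fresh reply.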