From f8079d067d11e36a5d24ec162762e83e9d8f1f02 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Sat, 16 Dec 2023 10:52:41 -0800
Subject: [PATCH] UI: save the sent chat message on "no model is loaded" error

---
 modules/chat.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/modules/chat.py b/modules/chat.py
index 7a44c03e..613cae1b 100644
--- a/modules/chat.py
+++ b/modules/chat.py
@@ -210,10 +210,6 @@ def chatbot_wrapper(text, state, regenerate=False, _continue=False, loading_mess
     output = copy.deepcopy(history)
     output = apply_extensions('history', output)
     state = apply_extensions('state', state)
-    if shared.model_name == 'None' or shared.model is None:
-        logger.error("No model is loaded! Select one in the Model tab.")
-        yield output
-        return
 
     visible_text = None
     stopping_strings = get_stopping_strings(state)
@@ -252,6 +248,9 @@ def chatbot_wrapper(text, state, regenerate=False, _continue=False, loading_mess
             'internal': output['internal']
         }
 
+    if shared.model_name == 'None' or shared.model is None:
+        raise ValueError("No model is loaded! Select one in the Model tab.")
+
     # Generate the prompt
     kwargs = {
         '_continue': _continue,