Add a warning for when no model is loaded

oobabooga 2023-04-13 10:35:08 -03:00
parent ddbd237ec9
commit 04866dc4fc
2 changed files with 16 additions and 0 deletions


@@ -117,6 +117,11 @@ def extract_message_from_reply(reply, state):
def chatbot_wrapper(text, state, regenerate=False, _continue=False):
    if shared.model_name == 'None':
        print("No model is loaded! Select one in the Model tab.")
        yield shared.history['visible']
        return

    # Defining some variables
    cumulative_reply = ''
    last_reply = [shared.history['internal'][-1][1], shared.history['visible'][-1][1]] if _continue else None
@@ -190,6 +195,11 @@ def chatbot_wrapper(text, state, regenerate=False, _continue=False):
def impersonate_wrapper(text, state):
    if shared.model_name == 'None':
        print("No model is loaded! Select one in the Model tab.")
        yield ''
        return

    # Defining some variables
    cumulative_reply = ''
    eos_token = '\n' if state['stop_at_newline'] else None


@@ -124,6 +124,12 @@ def stop_everything_event():
def generate_reply(question, state, eos_token=None, stopping_strings=[]):
    if shared.model_name == 'None':
        print("No model is loaded! Select one in the Model tab.")
        yield formatted_outputs(question, shared.model_name)
        return

    clear_torch_cache()
    seed = set_manual_seed(state['seed'])
    shared.stop_everything = False
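
All three guards follow the same shape: these functions are generators (note the yield), so instead of returning immediately they first yield a harmless fallback (the current visible history, an empty string, or the formatted input question) before returning, which gives whatever consumes the generator something to display while generation is skipped. A minimal sketch of that pattern, written with hypothetical names (run_generation, model_name, history) rather than anything taken from the repository:

    # Sketch of the guard-clause pattern used in this commit; names are illustrative.
    def run_generation(prompt, model_name, history):
        if model_name == 'None':
            # Warn on the console and hand back an unchanged value,
            # so the consumer of this generator still receives an output.
            print("No model is loaded! Select one in the Model tab.")
            yield history
            return

        # Normal path: stream partial results as they are produced.
        for i in range(3):
            yield history + [f"partial reply {i}"]

    # Consuming the generator the way a streaming UI would:
    for update in run_generation("Hello", "None", []):
        print(update)   # prints [] once, then the loop ends

Yielding before the early return (rather than returning alone) keeps the streaming loop well-behaved: the caller sees one final, unchanged value instead of no output at all.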