mirror of
https://github.com/oobabooga/text-generation-webui.git
synced 2024-12-24 13:28:59 +01:00
Remove an error message
This commit is contained in:
parent
661bfaac8e
commit
0ede2965d5
@ -48,4 +48,4 @@ def count_tokens(text):
|
||||
tokens = get_encoded_length(text)
|
||||
return str(tokens)
|
||||
except:
|
||||
return '-1'
|
||||
return '0'
|
||||
|
@ -107,7 +107,6 @@ def _generate_reply(question, state, stopping_strings=None, is_chat=False, escap
|
||||
|
||||
def encode(prompt, add_special_tokens=True, add_bos_token=True, truncation_length=None):
|
||||
if shared.tokenizer is None:
|
||||
logger.error('No tokenizer is loaded')
|
||||
raise ValueError('No tokenizer is loaded')
|
||||
|
||||
if shared.model.__class__.__name__ in ['LlamaCppModel', 'RWKVModel', 'CtransformersModel', 'Exllamav2Model']:
|
||||
@ -138,7 +137,6 @@ def encode(prompt, add_special_tokens=True, add_bos_token=True, truncation_lengt
|
||||
|
||||
def decode(output_ids, skip_special_tokens=True):
    """Convert a sequence of token ids back into text using the loaded tokenizer.

    Raises:
        ValueError: if no tokenizer is currently loaded.
    """
    tokenizer = shared.tokenizer
    if tokenizer is None:
        # Log before raising so the failure is visible in the server log too.
        logger.error('No tokenizer is loaded')
        raise ValueError('No tokenizer is loaded')

    # Passed positionally to match the tokenizer's decode signature.
    return tokenizer.decode(output_ids, skip_special_tokens)
|
||||
|
Loading…
Reference in New Issue
Block a user