Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-22 08:07:56 +01:00)
Fix Open Assistant
parent 1ddcd4d0ba
commit b9e0712b92
@@ -37,9 +37,13 @@ def encode(prompt, tokens_to_generate=0, add_special_tokens=True):
     return input_ids.cuda()
 
 def decode(output_ids):
-    reply = shared.tokenizer.decode(output_ids, skip_special_tokens=True)
-    reply = reply.replace(r'<|endoftext|>', '')
-    return reply
+    # Open Assistant relies on special tokens like <|endoftext|>
+    if re.match('oasst-*', shared.model_name.lower()):
+        return shared.tokenizer.decode(output_ids, skip_special_tokens=False)
+    else:
+        reply = shared.tokenizer.decode(output_ids, skip_special_tokens=True)
+        reply = reply.replace(r'<|endoftext|>', '')
+        return reply
 
 def generate_softprompt_input_tensors(input_ids):
     inputs_embeds = shared.model.transformer.wte(input_ids)
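For context, here is a minimal standalone sketch of the patched decode logic. The FakeTokenizer class and the sample model names are hypothetical stand-ins used only to make the branch observable without loading a model; the real code uses shared.tokenizer and shared.model_name from the web UI's shared module, and a real Hugging Face tokenizer exposes the same decode(ids, skip_special_tokens=...) signature.

import re

# Hypothetical stand-in for shared.tokenizer: ignores output_ids and returns a
# canned string so the special-token handling is visible without a real model.
class FakeTokenizer:
    def decode(self, output_ids, skip_special_tokens=True):
        text = "Hello<|endoftext|>"
        return text.replace('<|endoftext|>', '') if skip_special_tokens else text

tokenizer = FakeTokenizer()

def decode(output_ids, model_name):
    # Open Assistant (oasst-*) models rely on special tokens like
    # <|endoftext|>, so the reply is decoded with them preserved.
    if re.match('oasst-*', model_name.lower()):
        return tokenizer.decode(output_ids, skip_special_tokens=False)
    else:
        reply = tokenizer.decode(output_ids, skip_special_tokens=True)
        reply = reply.replace(r'<|endoftext|>', '')
        return reply

print(decode([0], 'oasst-sft-1-pythia-12b'))  # keeps <|endoftext|>
print(decode([0], 'llama-7b'))                # strips it

As in the commit, only model names matching oasst-* take the skip_special_tokens=False path; every other model keeps the previous behavior of stripping <|endoftext|> from the reply.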