From f871971de1e274d4ce298ae0d19e27e3de5539a8 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Tue, 28 Feb 2023 00:25:30 -0300
Subject: [PATCH] Trying to get the chat to work

---
 modules/text_generation.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/modules/text_generation.py b/modules/text_generation.py
index 4e0056c6..ba4b7d79 100644
--- a/modules/text_generation.py
+++ b/modules/text_generation.py
@@ -22,6 +22,9 @@ def get_max_prompt_length(tokens):
     return max_length
 
 def encode(prompt, tokens_to_generate=0, add_special_tokens=True):
+    if shared.is_RWKV:
+        return prompt
+
     input_ids = shared.tokenizer.encode(str(prompt), return_tensors='pt', truncation=True, max_length=get_max_prompt_length(tokens_to_generate), add_special_tokens=add_special_tokens)
     if shared.args.cpu:
         return input_ids