Mirror of https://github.com/oobabooga/text-generation-webui.git
Synced 2024-11-22 08:07:56 +01:00
Add decode functions to llama.cpp/exllama
This commit is contained in:
parent
1ba2e88551
commit
b6643e5039
@ -120,3 +120,6 @@ class ExllamaModel:
|
||||
|
||||
def encode(self, string, **kwargs):
    """Tokenize `string` with the model's tokenizer and return its token ids.

    Extra keyword arguments are accepted for interface compatibility but ignored.
    """
    tokenizer = self.tokenizer
    return tokenizer.encode(string)
|
||||
|
||||
def decode(self, string, **kwargs):
    """Convert token ids back into text.

    The tokenizer's `decode` returns an indexable batch; only element 0 is
    returned. Extra keyword arguments are accepted but ignored.
    """
    batch = self.tokenizer.decode(string)
    return batch[0]
|
||||
|
@ -65,6 +65,9 @@ class LlamaCppModel:
|
||||
|
||||
return self.model.tokenize(string)
|
||||
|
||||
def decode(self, tokens):
    """Map a sequence of token ids back to text via the model's detokenizer."""
    model = self.model
    return model.detokenize(tokens)
|
||||
|
||||
def generate(self, prompt, state, callback=None):
|
||||
prompt = prompt if type(prompt) is str else prompt.decode()
|
||||
completion_chunks = self.model.create_completion(
|
||||
|
Loading…
Reference in New Issue
Block a user