Fix llama.cpp with --no-stream

oobabooga 2023-03-31 18:43:45 -03:00
parent 875de5d983
commit 2259143fec


@@ -1,8 +1,5 @@
from pathlib import Path
import llamacpp
import modules.shared as shared
from modules.callbacks import Iteratorize
@@ -65,6 +62,7 @@ class LlamaCppModel:
self.model.eval()
token = self.model.sample()
text = self.model.token_to_str(token)
output += text
is_end_of_text = token == self.model.token_eos()
if callback:
    callback(text)
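
For context, the hunk above sits inside a per-token generation loop: each iteration evaluates the model, samples a token, decodes it, and appends the text to output before the optional streaming callback fires. Under --no-stream no callback is passed, so the caller only ever sees the accumulated return value. Below is a minimal sketch of such a loop, assuming only the llamacpp calls visible in the hunk (eval, sample, token_to_str, token_eos); the standalone generate signature and the token_count cap are illustrative, not the repository's exact code.

def generate(model, context, token_count=200, callback=None):
    # Sketch only: assumes the llamacpp bindings used in the hunk above.
    # Prompt ingestion of `context` is omitted for brevity.
    output = ""
    for _ in range(token_count):
        model.eval()                              # run one inference step
        token = model.sample()                    # sample the next token id
        text = model.token_to_str(token)          # decode the token to text
        output += text                            # accumulate even when not streaming
        is_end_of_text = token == model.token_eos()
        if callback:                              # streaming path; skipped under --no-stream
            callback(text)
        if is_end_of_text:                        # stop once EOS is sampled
            break
    return output                                 # --no-stream relies on this return value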