Merge pull request #6203 from oobabooga/dev

Merge dev branch
commit e813b322cf by oobabooga, 2024-07-05 07:37:19 -03:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
2 changed files with 7 additions and 2 deletions


@@ -577,7 +577,7 @@ def find_all_histories_with_first_prompts(state):
                 data = json.load(f)
 
             first_prompt = ""
-            if 'visible' in data and len(data['visible']) > 0:
+            if data and 'visible' in data and len(data['visible']) > 0:
                 if data['internal'][0][0] == '<|BEGIN-VISIBLE-CHAT|>':
                     if len(data['visible']) > 1:
                         first_prompt = html.unescape(data['visible'][1][0])

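The only change in this file is the added `data and` guard: if a saved history file parses to None or an empty object, the function now skips it instead of raising on the 'visible'/'internal' lookups. A minimal standalone sketch of the same guard (the helper name `first_visible_prompt` is illustrative, not part of the commit):

    import html
    import json

    def first_visible_prompt(path):
        # Illustrative helper showing the guard added in this hunk.
        with open(path, 'r', encoding='utf-8') as f:
            data = json.load(f)

        first_prompt = ""
        # `data and ...` short-circuits when json.load() returned None (a file
        # containing just `null`) or an empty dict, so the lookups below never raise.
        if data and 'visible' in data and len(data['visible']) > 0:
            if data['internal'][0][0] == '<|BEGIN-VISIBLE-CHAT|>':
                if len(data['visible']) > 1:
                    first_prompt = html.unescape(data['visible'][1][0])

        return first_prompt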

@@ -100,9 +100,11 @@ def eval_with_progress(self, tokens: Sequence[int]):
 def monkey_patch_llama_cpp_python(lib):
+    if getattr(lib.Llama, '_is_patched', False):
+        # If the patch is already applied, do nothing
+        return
     def my_generate(self, *args, **kwargs):
         if shared.args.streaming_llm:
             new_sequence = args[0]
             past_sequence = self._input_ids
@@ -116,3 +118,6 @@ def monkey_patch_llama_cpp_python(lib):
     lib.Llama.eval = eval_with_progress
     lib.Llama.original_generate = lib.Llama.generate
     lib.Llama.generate = my_generate
+
+    # Set the flag to indicate that the patch has been applied
+    lib.Llama._is_patched = True
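
Taken together, the two hunks above make the monkey patch idempotent: the function returns early if a `_is_patched` attribute is already set on `lib.Llama`, and sets it once the patch has been applied, so calling the patcher twice can no longer rebind `original_generate` to an already-wrapped `generate`. A minimal sketch of the same guard-and-flag pattern on a stand-in class (`FakeLlama` and `patch_generate` are illustrative names, not from the commit):

    class FakeLlama:
        # Stand-in for lib.Llama, only here to demonstrate the pattern.
        def generate(self):
            return "original"

    def patch_generate(cls):
        # Guard: if the flag is already set, the patch was applied before.
        if getattr(cls, '_is_patched', False):
            return

        # Keep a reference to the unpatched method before replacing it.
        cls.original_generate = cls.generate

        def wrapped_generate(self):
            return "wrapped " + cls.original_generate(self)

        cls.generate = wrapped_generate
        # Flag: marks the class as patched so a second call is a no-op.
        cls._is_patched = True

    patch_generate(FakeLlama)
    patch_generate(FakeLlama)          # no-op: the flag short-circuits it
    print(FakeLlama().generate())      # prints "wrapped original", not double-wrapped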