Minor change to prevent future bugs

oobabooga 2023-06-25 01:38:54 -03:00
parent e6e5f546b8
commit 365b672531


@@ -1,4 +1,5 @@
 import ast
+import copy
 import random
 import re
 import time
@@ -9,8 +10,11 @@ import torch
 import transformers
 
 import modules.shared as shared
-from modules.callbacks import (Iteratorize, Stream,
-                               _StopEverythingStoppingCriteria)
+from modules.callbacks import (
+    Iteratorize,
+    Stream,
+    _StopEverythingStoppingCriteria
+)
 from modules.extensions import apply_extensions
 from modules.html_generator import generate_4chan_html, generate_basic_html
 from modules.logging_colors import logger
@@ -204,6 +208,7 @@ def _generate_reply(question, state, stopping_strings=None, is_chat=False):
     reply = ''
     is_stream = state['stream']
     if len(all_stop_strings) > 0 and not state['stream']:
+        state = copy.deepcopy(state)
         state['stream'] = True
 
     for reply in generate_func(question, original_question, seed, state, stopping_strings, is_chat=is_chat):
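The commit message does not spell out which future bug is being prevented, but the added copy.deepcopy(state) suggests the intent is to keep the temporary state['stream'] = True override local to _generate_reply instead of mutating the caller's settings dict. A minimal sketch of that side effect, with illustrative function names that are not from the repository:

import copy

def force_stream_in_place(state):
    # Mutates the caller's dict: the override leaks out of the function.
    state['stream'] = True

def force_stream_on_copy(state):
    # Works on a deep copy, so the caller's settings stay untouched.
    state = copy.deepcopy(state)
    state['stream'] = True

settings = {'stream': False}
force_stream_in_place(settings)
print(settings['stream'])   # True -- the caller's flag was silently changed

settings = {'stream': False}
force_stream_on_copy(settings)
print(settings['stream'])   # False -- the caller's flag is preserved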