Repository: https://github.com/oobabooga/text-generation-webui.git
is_chat changes fix for openai extension (#2008)
Commit: c4f0e6d740
Parent: 394bb253db
@@ -147,15 +147,6 @@ class Handler(BaseHTTPRequestHandler):
             self.send_error(404)
 
     def do_POST(self):
-        # ... haaack.
-        is_chat = shared.args.chat
-        try:
-            shared.args.chat = True
-            self.do_POST_wrap()
-        finally:
-            shared.args.chat = is_chat
-
-    def do_POST_wrap(self):
         if debug:
             print(self.headers)  # did you know... python-openai sends your linux kernel & python version?
         content_length = int(self.headers['Content-Length'])
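For orientation: this hunk deletes the workaround in which chat mode was forced by flipping the global shared.args.chat flag around every POST request. Below is a minimal, self-contained sketch of the before/after pattern using only names visible in this diff (shared.args.chat, generate_reply, is_chat); the stubs and the two handle_* wrappers are illustrative placeholders, not code from the extension.

from types import SimpleNamespace

# Stand-ins for the extension's globals and generator, just for this sketch.
shared = SimpleNamespace(args=SimpleNamespace(chat=False))

def generate_reply(prompt, req_params, stopping_strings=None, is_chat=False):
    # Stub: the real function streams model output; here we only echo.
    yield f"(is_chat={is_chat}) reply to: {prompt}"

# Before this commit (simplified): chat behaviour was forced by temporarily
# flipping the global flag around the whole request handler.
def handle_old(prompt, req_params, stopping_strings):
    saved = shared.args.chat
    try:
        shared.args.chat = True   # global side effect
        return list(generate_reply(prompt, req_params, stopping_strings=stopping_strings))
    finally:
        shared.args.chat = saved  # restore the previous value

# After this commit (simplified): the flag travels with the call instead,
# so no global state is mutated per request.
def handle_new(prompt, req_params, stopping_strings):
    return list(generate_reply(prompt, req_params,
                               stopping_strings=stopping_strings,
                               is_chat=True))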
@@ -349,7 +340,7 @@ class Handler(BaseHTTPRequestHandler):
         # generate reply #######################################
         if debug:
             print({'prompt': prompt, 'req_params': req_params, 'stopping_strings': stopping_strings})
-        generator = generate_reply(prompt, req_params, stopping_strings=stopping_strings)
+        generator = generate_reply(prompt, req_params, stopping_strings=stopping_strings, is_chat=True)
 
         answer = ''
         seen_content = ''
@@ -526,7 +517,7 @@ class Handler(BaseHTTPRequestHandler):
         if debug:
             print({'edit_template': edit_task, 'req_params': req_params, 'token_count': token_count})
 
-        generator = generate_reply(edit_task, req_params, stopping_strings=standard_stopping_strings)
+        generator = generate_reply(edit_task, req_params, stopping_strings=standard_stopping_strings, is_chat=True)
 
         answer = ''
         for a in generator:
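A rough usage sketch of how the two updated call sites consume the generator, assuming (as the answer = '' and for a in generator: lines above suggest) that each yielded value is the reply accumulated so far; the generate_reply stub and the collect_answer helper are hypothetical placeholders, not the extension's real code.

def generate_reply(prompt, req_params, stopping_strings=None, is_chat=False):
    # Stub standing in for the webui's streaming generator: yields
    # progressively longer partial replies.
    for partial in ["Hello", "Hello there", "Hello there!"]:
        yield partial

def collect_answer(generator):
    # Keep only the last (most complete) yielded value, mirroring the
    # answer-accumulation loops in the hunks above.
    answer = ''
    for a in generator:
        answer = a
    return answer

answer = collect_answer(generate_reply("Hi", {}, stopping_strings=[], is_chat=True))
print(answer)  # -> "Hello there!"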