API: set do_sample=false when temperature=0 (#5275)
parent 3fef37cda8
commit 232c07bf1f
@@ -97,6 +97,9 @@ async def openai_completions(request: Request, request_data: CompletionRequest):
     path = request.url.path
     is_legacy = "/generate" in path
 
+    if request_data.temperature == 0:
+        request_data.do_sample = False
+
     if request_data.stream:
         async def generator():
             async with streaming_semaphore:
@@ -120,6 +123,9 @@ async def openai_chat_completions(request: Request, request_data: ChatCompletion
     path = request.url.path
     is_legacy = "/generate" in path
 
+    if request_data.temperature == 0:
+        request_data.do_sample = False
+
     if request_data.stream:
         async def generator():
             async with streaming_semaphore:
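With this change, a client gets deterministic (greedy) decoding simply by sending temperature=0, matching the OpenAI API convention. Below is a minimal sketch of a request that exercises the new code path, assuming the OpenAI-compatible API extension is running locally on its default port; the URL, port, and prompt are illustrative and not part of the commit.

# Sketch only: adjust the host/port to your own setup.
import requests

response = requests.post(
    "http://127.0.0.1:5000/v1/chat/completions",
    json={
        "messages": [{"role": "user", "content": "Say hello."}],
        # temperature == 0 now sets do_sample = False server-side,
        # i.e. greedy decoding, so repeated calls should return the same text.
        "temperature": 0,
    },
)
print(response.json()["choices"][0]["message"]["content"])

The same applies to the legacy /v1/completions endpoint handled by openai_completions, since both hunks add the identical check.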