Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-10-01 01:26:03 -04:00)
API: set do_sample=false when temperature=0 (#5275)
commit 232c07bf1f
parent 3fef37cda8
@@ -97,6 +97,9 @@ async def openai_completions(request: Request, request_data: CompletionRequest):
     path = request.url.path
     is_legacy = "/generate" in path
 
+    if request_data.temperature == 0:
+        request_data.do_sample = False
+
     if request_data.stream:
         async def generator():
             async with streaming_semaphore:
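Why the guard helps, as a rough sketch (not the webui's decoding code, just an illustrative helper): during sampling, logits are divided by the temperature, so a temperature of 0 is undefined, whereas do_sample=False switches to greedy decoding (argmax), which is what clients usually expect temperature=0 to mean.

import torch

def pick_next_token(logits: torch.Tensor, temperature: float, do_sample: bool) -> int:
    # With sampling disabled (or temperature 0), fall back to greedy decoding.
    if not do_sample or temperature == 0:
        return int(torch.argmax(logits, dim=-1))
    # Regular sampling: scale logits by temperature, then draw from the distribution.
    probs = torch.softmax(logits / temperature, dim=-1)
    return int(torch.multinomial(probs, num_samples=1))

logits = torch.tensor([2.0, 0.5, -1.0])
print(pick_next_token(logits, temperature=0, do_sample=True))    # always 0 (greedy)
print(pick_next_token(logits, temperature=0.7, do_sample=True))  # sampled

The same check is added to the chat completions endpoint below.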
@@ -120,6 +123,9 @@ async def openai_chat_completions(request: Request, request_data: ChatCompletion
     path = request.url.path
     is_legacy = "/generate" in path
 
+    if request_data.temperature == 0:
+        request_data.do_sample = False
+
     if request_data.stream:
         async def generator():
             async with streaming_semaphore:
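A hypothetical client call that exercises the new behavior (it assumes the webui's OpenAI-compatible API is running locally on its default port, 5000; the URL and payload are illustrative, not taken from this commit):

import requests

resp = requests.post(
    "http://127.0.0.1:5000/v1/chat/completions",
    json={
        "messages": [{"role": "user", "content": "Say hello."}],
        "temperature": 0,  # the server now sets do_sample = False for this request
    },
    timeout=60,
)
print(resp.json()["choices"][0]["message"]["content"])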