# 2023-11-06 06:38:29 +01:00  (VCS blame annotation, not code)
import json
import time
# 2023-12-15 04:22:43 +01:00  (VCS blame annotation, not code)
from typing import Dict , List
# 2023-11-06 06:38:29 +01:00  (VCS blame annotation, not code)
from pydantic import BaseModel , Field
class GenerationOptions(BaseModel):
    """Sampling and generation parameters shared by all text-generation endpoints.

    Mixed into the concrete request models (CompletionRequest, ChatCompletionRequest,
    LogitsRequest) alongside the endpoint-specific parameter classes.
    """

    preset: str | None = Field(default=None, description="The name of a file under text-generation-webui/presets (without the .yaml extension). The sampling parameters that get overwritten by this option are the keys in the default_preset() function in modules/presets.py.")
    min_p: float = 0
    dynamic_temperature: bool = False
    dynamic_temperature_low: float = 0.1
    top_k: int = 0
    repetition_penalty: float = 1
    repetition_penalty_range: int = 1024
    typical_p: float = 1
    tfs: float = 1
    top_a: float = 0
    epsilon_cutoff: float = 0
    eta_cutoff: float = 0
    guidance_scale: float = 1
    # Fixed: mangled source had ' ' (a single space) as the default.
    negative_prompt: str = ''
    penalty_alpha: float = 0
    mirostat_mode: int = 0
    mirostat_tau: float = 5
    mirostat_eta: float = 0.1
    temperature_last: bool = False
    do_sample: bool = True
    seed: int = -1
    encoder_repetition_penalty: float = 1
    no_repeat_ngram_size: int = 0
    min_length: int = 0
    num_beams: int = 1
    length_penalty: float = 1
    early_stopping: bool = False
    truncation_length: int = 0
    max_tokens_second: int = 0
    custom_token_bans: str = ""
    auto_max_new_tokens: bool = False
    ban_eos_token: bool = False
    add_bos_token: bool = True
    skip_special_tokens: bool = True
    grammar_string: str = ""
class CompletionRequestParams(BaseModel):
    """OpenAI-compatible parameters specific to /v1/completions.

    Sampling parameters live in GenerationOptions; the two are combined in
    CompletionRequest. Several OpenAI fields are accepted but unused (see
    their descriptions) so that standard OpenAI clients do not error out.
    """

    model: str | None = Field(default=None, description="Unused parameter. To change the model, use the /v1/internal/model/load endpoint.")
    prompt: str | List[str]
    best_of: int | None = Field(default=1, description="Unused parameter.")
    echo: bool | None = False
    frequency_penalty: float | None = 0
    logit_bias: dict | None = None
    logprobs: int | None = None
    max_tokens: int | None = 16
    n: int | None = Field(default=1, description="Unused parameter.")
    presence_penalty: float | None = 0
    stop: str | List[str] | None = None
    stream: bool | None = False
    suffix: str | None = None
    temperature: float | None = 1
    top_p: float | None = 1
    user: str | None = Field(default=None, description="Unused parameter.")
class CompletionRequest(GenerationOptions, CompletionRequestParams):
    """Complete /v1/completions request: OpenAI params plus webui sampling options."""
class CompletionResponse(BaseModel):
    """Response schema for /v1/completions."""

    id: str
    choices: List[dict]
    # default_factory so the timestamp is computed per response; a plain
    # `int(time.time())` default is evaluated once at import time, stamping
    # every response with the server start time.
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str
    object: str = "text_completion"
    usage: dict
class ChatCompletionRequestParams(BaseModel):
    """Parameters specific to /v1/chat/completions.

    The first group mirrors the OpenAI chat API (some fields accepted but
    unused); the second group is webui-specific chat configuration.
    Sampling parameters live in GenerationOptions.
    """

    messages: List[dict]
    model: str | None = Field(default=None, description="Unused parameter. To change the model, use the /v1/internal/model/load endpoint.")
    frequency_penalty: float | None = 0
    function_call: str | dict | None = Field(default=None, description="Unused parameter.")
    functions: List[dict] | None = Field(default=None, description="Unused parameter.")
    logit_bias: dict | None = None
    max_tokens: int | None = None
    n: int | None = Field(default=1, description="Unused parameter.")
    presence_penalty: float | None = 0
    stop: str | List[str] | None = None
    stream: bool | None = False
    temperature: float | None = 1
    top_p: float | None = 1
    user: str | None = Field(default=None, description="Unused parameter.")

    # webui-specific extensions below.
    # Fixed: mangled source had ' instruct ' (padded) as the default.
    mode: str = Field(default='instruct', description="Valid options: instruct, chat, chat-instruct.")
    instruction_template: str | None = Field(default=None, description="An instruction template defined under text-generation-webui/instruction-templates. If not set, the correct template will be automatically obtained from the model metadata.")
    instruction_template_str: str | None = Field(default=None, description="A Jinja2 instruction template. If set, will take precedence over everything else.")
    character: str | None = Field(default=None, description="A character defined under text-generation-webui/characters. If not set, the default \"Assistant\" character will be used.")
    name1: str | None = Field(default=None, description="Your name (the user). By default, it's \"You\".")
    name2: str | None = Field(default=None, description="Overwrites the value set by character.")
    context: str | None = Field(default=None, description="Overwrites the value set by character.")
    greeting: str | None = Field(default=None, description="Overwrites the value set by character.")
    chat_template_str: str | None = Field(default=None, description="Jinja2 template for chat.")
    chat_instruct_command: str | None = None
    continue_: bool = Field(default=False, description="Makes the last bot message in the history be continued instead of starting a new message.")
class ChatCompletionRequest(GenerationOptions, ChatCompletionRequestParams):
    """Complete /v1/chat/completions request: OpenAI params plus webui sampling options."""
class ChatCompletionResponse(BaseModel):
    """Response schema for /v1/chat/completions."""

    id: str
    choices: List[dict]
    # default_factory so the timestamp is computed per response; a plain
    # `int(time.time())` default is evaluated once at import time, stamping
    # every response with the server start time.
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str
    object: str = "chat.completion"
    usage: dict
class EmbeddingsRequest(BaseModel):
    """Request body for /v1/embeddings."""

    # Text, list of texts, or pre-tokenized input (token ids / batches of ids).
    input: str | List[str] | List[int] | List[List[int]]
    model: str | None = Field(default=None, description="Unused parameter. To change the model, set the OPENEDAI_EMBEDDING_MODEL and OPENEDAI_EMBEDDING_DEVICE environment variables before starting the server.")
    # Fixed: mangled source had " float " (padded) as the default.
    encoding_format: str = Field(default="float", description="Can be float or base64.")
    user: str | None = Field(default=None, description="Unused parameter.")
class EmbeddingsResponse(BaseModel):
    """A single embedding entry in a /v1/embeddings response."""

    index: int
    embedding: List[float]
    # Fixed: mangled source had " embedding " (padded) as the object tag.
    object: str = "embedding"
class EncodeRequest(BaseModel):
    """Request body for the internal encode endpoint: raw text to tokenize."""

    text: str
class EncodeResponse(BaseModel):
    """Response of the internal encode endpoint: token ids and their count."""

    tokens: List[int]
    length: int
class DecodeRequest(BaseModel):
    """Request body for the internal decode endpoint: token ids to detokenize."""

    tokens: List[int]
class DecodeResponse(BaseModel):
    """Response of the internal decode endpoint: the detokenized text."""

    text: str
class TokenCountResponse(BaseModel):
    """Response of the internal token-count endpoint."""

    length: int
class LogitsRequestParams(BaseModel):
    """Parameters specific to the internal logits endpoint.

    Combined with GenerationOptions in LogitsRequest.
    """

    prompt: str
    # When True, scores are taken after the sampling stack has been applied.
    use_samplers: bool = False
    top_logits: int | None = 50
    frequency_penalty: float | None = 0
    max_tokens: int | None = 16
    presence_penalty: float | None = 0
    temperature: float | None = 1
    top_p: float | None = 1
class LogitsRequest(GenerationOptions, LogitsRequestParams):
    """Complete logits request: endpoint params plus webui sampling options."""
class LogitsResponse(BaseModel):
    """Response of the internal logits endpoint: scores keyed by token string."""

    logits: Dict[str, float]
class ModelInfoResponse(BaseModel):
    """Currently loaded model name and the names of any applied LoRAs."""

    model_name: str
    lora_names: List[str]
class ModelListResponse(BaseModel):
    """Names of all models available to the server."""

    model_names: List[str]
class LoadModelRequest(BaseModel):
    """Request body for /v1/internal/model/load."""

    model_name: str
    # Optional loader arguments and UI settings overrides; schema depends on
    # the loader in use — opaque dicts at this layer.
    args: dict | None = None
    settings: dict | None = None
class LoraListResponse(BaseModel):
    """Names of all LoRAs available to the server."""

    lora_names: List[str]
class LoadLorasRequest(BaseModel):
    """Request body for the internal LoRA-load endpoint: LoRAs to apply."""

    lora_names: List[str]
def to_json(obj):
    """Serialize *obj*'s instance attributes as pretty-printed (4-space) JSON."""
    return json.dumps(vars(obj), indent=4)
def to_dict(obj):
    """Return the instance attribute dictionary of *obj* (not a copy)."""
    return vars(obj)