Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-11-21 23:57:58 +01:00
API: Remove tiktoken from logit bias (#5391)
parent 40c7977f9b
commit 528318b700
@@ -110,22 +110,6 @@ def process_parameters(body, is_legacy=False):
     logits_processor = []
     logit_bias = body.get('logit_bias', None)
     if logit_bias:  # {str: float, ...}
-        # XXX convert tokens from tiktoken based on requested model
-        # Ex.: 'logit_bias': {'1129': 100, '11442': 100, '16243': 100}
-        try:
-            encoder = tiktoken.encoding_for_model(generate_params['model'])
-            new_logit_bias = {}
-            for logit, bias in logit_bias.items():
-                for x in encode(encoder.decode([int(logit)]), add_special_tokens=False)[0]:
-                    if int(x) in [0, 1, 2, 29871]:  # XXX LLAMA tokens
-                        continue
-
-                    new_logit_bias[str(int(x))] = bias
-            debug_msg('logit_bias_map', logit_bias, '->', new_logit_bias)
-            logit_bias = new_logit_bias
-        except KeyError:
-            pass  # assume native tokens if we can't find the tokenizer
-
         logits_processor = [LogitsBiasProcessor(logit_bias)]
 
     logprobs = None  # coming to chat eventually
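For context, the removed block remapped OpenAI-style logit_bias keys, which are tiktoken token IDs, onto the loaded model's native token IDs before the bias was applied. Below is a minimal standalone sketch of that remapping, assuming a Hugging Face tokenizer on the native side; the model names and the skipped special-token IDs are illustrative, not taken from this commit.

import tiktoken
from transformers import AutoTokenizer

def remap_logit_bias(logit_bias, openai_model, native_tokenizer):
    """Translate {str(tiktoken_id): bias} into {str(native_id): bias}."""
    try:
        encoder = tiktoken.encoding_for_model(openai_model)
    except KeyError:
        return logit_bias  # unknown model: assume the IDs are already native
    remapped = {}
    for token_id, bias in logit_bias.items():
        # Decode the tiktoken ID back to text, then re-encode it natively.
        text = encoder.decode([int(token_id)])
        for native_id in native_tokenizer.encode(text, add_special_tokens=False):
            if native_id in (0, 1, 2, 29871):  # skip list is illustrative (LLaMA specials)
                continue
            remapped[str(native_id)] = bias
    return remapped

tokenizer = AutoTokenizer.from_pretrained("gpt2")  # stand-in for the loaded model
print(remap_logit_bias({'1129': 100}, 'gpt-3.5-turbo', tokenizer))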
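After this change, the raw logit_bias dict goes straight into LogitsBiasProcessor, so its keys are interpreted as native token IDs. A rough sketch of what such a processor can look like on top of transformers, not the repository's exact implementation:

import torch
from transformers import LogitsProcessor

class LogitsBiasProcessor(LogitsProcessor):
    """Add a fixed bias to the logits of selected tokens at every step."""
    def __init__(self, logit_bias):  # {str(token_id): float}
        self.keys = torch.tensor([int(k) for k in logit_bias.keys()])
        self.values = torch.tensor(list(logit_bias.values()))

    def __call__(self, input_ids, scores):
        scores[..., self.keys] += self.values.to(scores.dtype)
        return scores

A processor like this can be passed to model.generate() through a transformers LogitsProcessorList.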