API: Remove tiktoken from logit bias (#5391)

Author: Forkoz, 2024-01-28 18:42:03 -06:00 (committed by GitHub)
Parent: 40c7977f9b
Commit: 528318b700

@@ -110,22 +110,6 @@ def process_parameters(body, is_legacy=False):
     logits_processor = []
     logit_bias = body.get('logit_bias', None)
     if logit_bias:  # {str: float, ...}
-        # XXX convert tokens from tiktoken based on requested model
-        # Ex.: 'logit_bias': {'1129': 100, '11442': 100, '16243': 100}
-        try:
-            encoder = tiktoken.encoding_for_model(generate_params['model'])
-            new_logit_bias = {}
-            for logit, bias in logit_bias.items():
-                for x in encode(encoder.decode([int(logit)]), add_special_tokens=False)[0]:
-                    if int(x) in [0, 1, 2, 29871]:  # XXX LLAMA tokens
-                        continue
-                    new_logit_bias[str(int(x))] = bias
-            debug_msg('logit_bias_map', logit_bias, '->', new_logit_bias)
-            logit_bias = new_logit_bias
-        except KeyError:
-            pass  # assume native tokens if we can't find the tokenizer
         logits_processor = [LogitsBiasProcessor(logit_bias)]
     logprobs = None  # coming to chat eventually
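
With the tiktoken translation removed, the dict handed to LogitsBiasProcessor is used as-is, so its keys must already be token ids in the loaded model's native vocabulary. For reference, below is a minimal sketch of how such a bias processor can be written against the Hugging Face transformers LogitsProcessor interface; the class name and details are illustrative, not the repository's actual implementation.

import torch
from transformers import LogitsProcessor

class LogitsBiasSketch(LogitsProcessor):
    """Adds a fixed bias to the scores of selected token ids."""

    def __init__(self, logit_bias: dict):
        # Keys arrive as strings of native token ids, e.g. {"3681": 100.0}
        self.keys = torch.tensor([int(k) for k in logit_bias.keys()], dtype=torch.long)
        self.values = torch.tensor(list(logit_bias.values()), dtype=torch.float)

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Move the bias tensors to whatever device the scores live on, then
        # add the bias to the listed token columns across the whole batch.
        scores[..., self.keys.to(scores.device)] += self.values.to(scores.device)
        return scores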
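
The practical consequence for API clients: logit_bias keys computed with tiktoken (OpenAI token ids) are no longer remapped on the server, so they must be re-encoded with the loaded model's own tokenizer. A hedged usage example against the OpenAI-compatible endpoint follows; the URL, port, and token id are illustrative.

import requests

# The token id below must come from the loaded model's tokenizer, not tiktoken.
response = requests.post(
    "http://127.0.0.1:5000/v1/completions",
    json={
        "prompt": "The capital of France is",
        "max_tokens": 5,
        "logit_bias": {"3681": 100},  # illustrative native token id
    },
)
print(response.json())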