from extensions.openai.utils import float_list_to_base64
from modules.text_generation import encode, decode
import numpy as np


def token_count(prompt):
    """Return the number of tokens in `prompt` for the currently loaded tokenizer."""
    tokens = encode(prompt)[0]

    return {
        'results': [{
            'tokens': len(tokens)
        }]
    }


def token_encode(input, encoding_format):
    """Tokenize `input` and return the token ids together with their count."""
    # if isinstance(input, list):
    tokens = encode(input)[0]

    return {
        'results': [{
            'tokens': tokens,
            'length': len(tokens),
        }]
    }


def token_decode(tokens, encoding_format):
    """Decode a sequence of token ids back into text."""
    # if isinstance(input, list):
    # if encoding_format == "base64":
    #     tokens = base64_to_float_list(tokens)
    # decode() returns the decoded string directly, so no [0] indexing is needed here.
    output = decode(tokens)

    return {
        'results': [{
            'text': output
        }]
    }
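

# Illustrative usage sketch (an assumption, not part of the original file): the calls
# below presume the web UI already has a model and tokenizer loaded so that
# encode()/decode() work; the exact token ids and counts depend on that tokenizer.
#
#     counted = token_count("Hello world")            # {'results': [{'tokens': <n>}]}
#     encoded = token_encode("Hello world", None)     # token ids plus their length
#     decoded = token_decode(encoded['results'][0]['tokens'], None)
#     # decoded['results'][0]['text'] should round-trip back to "Hello world"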