text-generation-webui/extensions/openai/tokens.py

from modules.text_generation import decode, encode


def token_count(prompt):
    # Tokenize the prompt and report how many tokens it contains.
    tokens = encode(prompt)[0]

    return {
        'results': [{
            'tokens': len(tokens)
        }]
    }


def token_encode(input, encoding_format):
    # if isinstance(input, list):
    # Return the raw token ids for the input text along with their count.
    tokens = encode(input)[0]

    return {
        'results': [{
            'tokens': tokens,
            'length': len(tokens),
        }]
    }


def token_decode(tokens, encoding_format):
    # if isinstance(input, list):
    # if encoding_format == "base64":
    #     tokens = base64_to_float_list(tokens)
    # decode() returns the detokenized string directly; the original indexed
    # [0] here, which would keep only the first character of the output.
    output = decode(tokens)

    return {
        'results': [{
            'text': output
        }]
    }
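

# Minimal usage sketch (not part of the original file). It assumes a model and
# tokenizer are already loaded, since encode()/decode() delegate to the shared
# tokenizer in text-generation-webui.
if __name__ == '__main__':
    counted = token_count('Hello, world!')
    print(counted['results'][0]['tokens'])    # token count, depends on the tokenizer

    encoded = token_encode('Hello, world!', encoding_format=None)
    ids = encoded['results'][0]['tokens']

    decoded = token_decode(ids, encoding_format=None)
    print(decoded['results'][0]['text'])      # approximately round-trips the input text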