2023-09-15 23:11:16 -04:00
|
|
|
from modules.text_generation import decode, encode
|
|
|
|
|
2023-07-12 14:33:25 -04:00
|
|
|
|
2023-07-11 17:50:08 -04:00
|
|
|
def token_count(prompt):
    """Return the token length of *prompt* as ``{'length': int}``.

    Uses the project tokenizer via ``encode``; only the count is exposed,
    not the token ids themselves.
    """
    # encode() returns a batched result; row 0 is the token sequence for
    # this single prompt.
    token_ids = encode(prompt)[0]
    return {'length': len(token_ids)}
|
|
|
|
|
|
|
|
|
2023-11-07 22:05:36 -05:00
|
|
|
def token_encode(input):
    """Tokenize *input* and return ``{'tokens': list, 'length': int}``.

    The token ids are converted to a plain Python list when the tokenizer
    yields a torch Tensor or numpy ndarray, so the result is JSON-serializable.
    """
    token_ids = encode(input)[0]

    # Class-name check instead of isinstance() — avoids importing
    # torch/numpy in this module just to test the type.
    if type(token_ids).__name__ in ('Tensor', 'ndarray'):
        token_ids = token_ids.tolist()

    return {
        'tokens': token_ids,
        'length': len(token_ids),
    }
|
|
|
|
|
|
|
|
|
2023-11-07 22:05:36 -05:00
|
|
|
def token_decode(tokens):
    """Decode a sequence of token ids back to text as ``{'text': str}``."""
    decoded_text = decode(tokens)
    return {'text': decoded_text}
|