mirror of https://github.com/nomic-ai/gpt4all.git
Update replit.cpp
replit_tokenizer_detokenize returns std::string now

Signed-off-by: niansa/tuxifan <tuxifan@posteo.de>
parent 0855c0df1d
commit 47323f8591
@@ -146,14 +146,12 @@ std::vector<LLModel::Token> replit_tokenizer_tokenize(replit_tokenizer & tokenizer,
     return tokenized.first;
 }
 
-std::string_view replit_tokenizer_detokenize(replit_tokenizer & tokenizer, const std::vector<LLModel::Token> & tokens) {
+std::string replit_tokenizer_detokenize(replit_tokenizer & tokenizer, const std::vector<LLModel::Token> & tokens) {
     std::string text;
     for (auto token : tokens) {
         text += tokenizer.raw_vocab.id_to_token[token];
     }
-    static std::string denormalized_text;
-    denormalized_text = replace_all(text, ws_symbol, " ");
-    return denormalized_text;
+    return replace_all(text, ws_symbol, " ");
 }
 
 // no defaults for now