Remove leading whitespace in responses.

parent 131cfcdeae
commit 5c0d077f74
@@ -378,6 +378,32 @@ bool ChatLLM::isModelLoaded() const
     return m_llModelInfo.model && m_llModelInfo.model->isModelLoaded();
 }
 
+std::string remove_leading_whitespace(const std::string& input) {
+    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
+        return !std::isspace(c);
+    });
+
+    if (first_non_whitespace == input.end())
+        return std::string();
+
+    return std::string(first_non_whitespace, input.end());
+}
+
+std::string trim_whitespace(const std::string& input) {
+    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
+        return !std::isspace(c);
+    });
+
+    if (first_non_whitespace == input.end())
+        return std::string();
+
+    auto last_non_whitespace = std::find_if(input.rbegin(), input.rend(), [](unsigned char c) {
+        return !std::isspace(c);
+    }).base();
+
+    return std::string(first_non_whitespace, last_non_whitespace);
+}
+
 void ChatLLM::regenerateResponse()
 {
     // ChatGPT uses a different semantic meaning for n_past than local models. For ChatGPT, the meaning
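
The hunk above adds the two free helpers remove_leading_whitespace and trim_whitespace ahead of the ChatLLM methods that call them (the next hunk drops the earlier definitions, so this is effectively a move), and remove_leading_whitespace gains a guard for all-whitespace input. Below is a minimal standalone sketch, outside the Qt build, that copies the two helpers as added above and exercises them on a sample string; the main() driver and the sample input are illustrative only.

// Standalone sketch: the two helpers from the hunk above plus a tiny driver.
// Build (illustrative): g++ -std=c++17 whitespace_demo.cpp
#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

std::string remove_leading_whitespace(const std::string& input) {
    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
        return !std::isspace(c);
    });

    // Guard added by this commit: an all-whitespace string maps to "".
    if (first_non_whitespace == input.end())
        return std::string();

    return std::string(first_non_whitespace, input.end());
}

std::string trim_whitespace(const std::string& input) {
    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
        return !std::isspace(c);
    });

    if (first_non_whitespace == input.end())
        return std::string();

    // Reverse search for the last non-whitespace character, then convert the
    // reverse iterator back to a forward one-past-the-end position.
    auto last_non_whitespace = std::find_if(input.rbegin(), input.rend(), [](unsigned char c) {
        return !std::isspace(c);
    }).base();

    return std::string(first_non_whitespace, last_non_whitespace);
}

int main() {
    const std::string response = "  \n  Hello, world!   ";
    std::cout << '[' << remove_leading_whitespace(response) << "]\n"; // "[Hello, world!   ]" (trailing spaces kept)
    std::cout << '[' << trim_whitespace(response) << "]\n";           // "[Hello, world!]"
    std::cout << '[' << remove_leading_whitespace("   \n") << "]\n";  // "[]"
    return 0;
}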
@@ -409,29 +435,6 @@ void ChatLLM::resetContext()
     m_ctx = LLModel::PromptContext();
 }
 
-std::string remove_leading_whitespace(const std::string& input) {
-    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
-        return !std::isspace(c);
-    });
-
-    return std::string(first_non_whitespace, input.end());
-}
-
-std::string trim_whitespace(const std::string& input) {
-    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
-        return !std::isspace(c);
-    });
-
-    if (first_non_whitespace == input.end())
-        return std::string();
-
-    auto last_non_whitespace = std::find_if(input.rbegin(), input.rend(), [](unsigned char c) {
-        return !std::isspace(c);
-    }).base();
-
-    return std::string(first_non_whitespace, last_non_whitespace);
-}
-
 QString ChatLLM::response() const
 {
     return QString::fromStdString(remove_leading_whitespace(m_response));
@@ -476,7 +479,7 @@ bool ChatLLM::handleResponse(int32_t token, const std::string &response)
     // check for error
     if (token < 0) {
         m_response.append(response);
-        emit responseChanged(QString::fromStdString(m_response));
+        emit responseChanged(QString::fromStdString(remove_leading_whitespace(m_response)));
         return false;
     }
 
@@ -486,7 +489,7 @@ bool ChatLLM::handleResponse(int32_t token, const std::string &response)
     m_timer->inc();
     Q_ASSERT(!response.empty());
     m_response.append(response);
-    emit responseChanged(QString::fromStdString(m_response));
+    emit responseChanged(QString::fromStdString(remove_leading_whitespace(m_response)));
     return !m_stopGenerating;
 }
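
The last two hunks apply remove_leading_whitespace at every responseChanged emission, so the UI never renders the leading whitespace some models produce before their first real token, while m_response itself keeps the raw text. Below is a plain-C++ sketch of that streaming pattern with no Qt: the ResponseAccumulator class and its std::function callback are hypothetical stand-ins for ChatLLM and the responseChanged signal; only remove_leading_whitespace is taken from the diff.

// Sketch of the per-token emit pattern, without Qt signals.
#include <algorithm>
#include <cctype>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

std::string remove_leading_whitespace(const std::string& input) {
    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
        return !std::isspace(c);
    });
    if (first_non_whitespace == input.end())
        return std::string();
    return std::string(first_non_whitespace, input.end());
}

// Hypothetical stand-in for the handleResponse() path: append each token to the
// accumulated response, then notify the listener with the left-trimmed view.
class ResponseAccumulator {
public:
    explicit ResponseAccumulator(std::function<void(const std::string&)> onChanged)
        : m_onChanged(std::move(onChanged)) {}

    bool handleToken(const std::string& token) {
        m_response.append(token);                           // raw text is kept
        m_onChanged(remove_leading_whitespace(m_response)); // trimmed view is shown
        return true; // the real code returns !m_stopGenerating
    }

private:
    std::string m_response;
    std::function<void(const std::string&)> m_onChanged;
};

int main() {
    ResponseAccumulator acc([](const std::string& shown) {
        std::cout << '[' << shown << "]\n";
    });
    // Simulated token stream: the first tokens are pure whitespace.
    const std::vector<std::string> tokens = {" ", "\n", "Hello", ",", " world"};
    for (const auto& tok : tokens)
        acc.handleToken(tok);
    // Prints, one per line: [], [], [Hello], [Hello,], [Hello, world]
    return 0;
}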