diff --git a/gpt4all-chat/chatllm.cpp b/gpt4all-chat/chatllm.cpp
index 0f5da45f..181b8452 100644
--- a/gpt4all-chat/chatllm.cpp
+++ b/gpt4all-chat/chatllm.cpp
@@ -789,7 +789,7 @@ void ChatLLM::processSystemPrompt()
         return;
 
     const std::string systemPrompt = MySettings::globalInstance()->modelSystemPrompt(m_modelInfo).toStdString();
-    if (systemPrompt.empty()) {
+    if (QString::fromStdString(systemPrompt).trimmed().isEmpty()) {
        m_processedSystemPrompt = true;
        return;
    }
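
For context, the old guard only skipped processing when the system prompt string was strictly empty, so a prompt consisting solely of whitespace would still be sent to the model. The sketch below is not part of the patch; it is a minimal standalone illustration of the difference between the two checks, using a hypothetical whitespace-only prompt value and assuming only Qt Core is available.

```cpp
// Minimal sketch (not from the patch) comparing the old and new emptiness checks.
#include <QString>
#include <iostream>
#include <string>

int main()
{
    // Hypothetical system prompt containing only whitespace, e.g. a stray newline
    // saved in the settings.
    const std::string systemPrompt = " \n";

    // Old check: a whitespace-only string is not empty, so processing would continue.
    std::cout << std::boolalpha
              << "systemPrompt.empty(): " << systemPrompt.empty() << '\n';

    // New check: trimming removes the whitespace, so the prompt is treated as unset
    // and processing is skipped.
    std::cout << "trimmed().isEmpty():  "
              << QString::fromStdString(systemPrompt).trimmed().isEmpty() << '\n';
    return 0;
}
```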