Simplify.

Adam Treat 2023-05-09 11:46:33 -04:00
parent 53a39b9ecf
commit 9c008fb677

@@ -76,14 +76,11 @@ bool ChatLLM::loadModel(const QString &modelName)
     if (isModelLoaded() && m_modelName == modelName)
         return true;
 
-    bool isFirstLoad = false;
     if (isModelLoaded()) {
         resetContextPrivate();
         delete m_llmodel;
         m_llmodel = nullptr;
         emit isModelLoadedChanged();
-    } else {
-        isFirstLoad = true;
     }
 
     bool isGPTJ = false;
@@ -122,9 +119,11 @@ bool ChatLLM::loadModel(const QString &modelName)
         emit isModelLoadedChanged();
 
-        if (isFirstLoad)
+        static bool isFirstLoad = false;
+        if (isFirstLoad) {
             emit sendStartup();
-        else
+            isFirstLoad = false;
+        } else
             emit sendModelLoaded();
     } else {
         const QString error = QString("Could not find model %1").arg(modelName);
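
For context, here is a minimal, self-contained sketch of the idiom this commit moves to: a function-local static flag that distinguishes the very first successful load from every later one, instead of recomputing that from the loader's state on each call. The function name, the model strings, and the true initializer below are illustrative only and are not taken from the gpt4all sources.

#include <iostream>
#include <string>

// Emits a "startup" notification the first time a model finishes loading,
// and a "model loaded" notification on every subsequent load.
void notifyModelLoaded(const std::string &name)
{
    // Initialized exactly once, the first time control reaches this line.
    static bool isFirstLoad = true;
    if (isFirstLoad) {
        std::cout << "startup: first model ready (" << name << ")\n";
        isFirstLoad = false;
    } else {
        std::cout << "model loaded: " << name << "\n";
    }
}

int main()
{
    notifyModelLoaded("model-a"); // takes the startup branch
    notifyModelLoaded("model-b"); // takes the model-loaded branch
    return 0;
}

The trade-off is scope: the removed local tracked "first load" per call based on whether a model was already resident, while the function-local static in the diff above tracks it once for the lifetime of the process.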