Start moving toward a single authoritative class for all settings. This

is necessary to get rid of technical debt before we drastically increase
the complexity of settings by adding per model settings and mirostat and
other fun things. Right now the settings are divided between QML and C++
and some convenience methods to deal with settings sync and so on that are
in other singletons. This change consolidates all the logic for settings
into a single class with a single API for both C++ and QML.
This commit is contained in:
Adam Treat 2023-06-28 13:47:57 -04:00 committed by AT
parent 390994ea5e
commit 705b480d72
4 changed files with 440 additions and 143 deletions

View File

@ -21,7 +21,6 @@ int main(int argc, char *argv[])
QCoreApplication::setOrganizationDomain("gpt4all.io"); QCoreApplication::setOrganizationDomain("gpt4all.io");
QCoreApplication::setApplicationName("GPT4All"); QCoreApplication::setApplicationName("GPT4All");
QCoreApplication::setApplicationVersion(APP_VERSION); QCoreApplication::setApplicationVersion(APP_VERSION);
QSettings::setDefaultFormat(QSettings::IniFormat);
Logger::globalInstance(); Logger::globalInstance();

View File

@ -1,6 +1,52 @@
#include "mysettings.h" #include "mysettings.h"
#include <QDir>
#include <QFile>
#include <QFileInfo>
#include <QSettings> #include <QSettings>
#include <QStandardPaths>
// Compiled-in fallbacks used when a key is absent from the QSettings store.
// Numeric/bool defaults are constexpr so they occupy no runtime init work.
static constexpr double default_temperature = 0.7;
static constexpr double default_topP = 0.1;
static constexpr int default_topK = 40;
static constexpr int default_maxLength = 4096;
static constexpr int default_promptBatchSize = 128;
static constexpr double default_repeatPenalty = 1.18;
static constexpr int default_repeatPenaltyTokens = 64;
// QString cannot be constexpr; these two incur dynamic static-initialization.
static QString default_promptTemplate = "### Human:\n%1\n### Assistant:\n";
static constexpr int default_threadCount = 0;
static constexpr bool default_saveChats = false;
static constexpr bool default_saveChatGPTChats = true;
static constexpr bool default_serverChat = false;
static QString default_userDefaultModel = "Application default";
static constexpr bool default_forceMetal = false;
// Returns the writable directory (with trailing '/') used for model
// downloads, creating it on first use and probing that it is writable.
// Emits a qWarning (but still returns a best-effort path) on failure.
static QString defaultLocalModelsPath()
{
    QString localPath = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation)
        + "/";

    // Ensure the directory exists before resolving its canonical form:
    // QFileInfo::canonicalFilePath() returns an empty string for a path
    // that does not exist yet, which previously yielded a bogus "/" result
    // on first run.
    QDir localDir(localPath);
    if (!localDir.exists()) {
        if (!localDir.mkpath(localPath)) {
            qWarning() << "ERROR: Local download directory can't be created:" << localPath;
            return localPath;
        }
    }
    QString canonicalLocalPath = QFileInfo(localPath).canonicalFilePath() + "/";

    // A successful write probe is deliberately left on disk so the probe
    // only has to be paid once per installation.
    QString testWritePath = localPath + QString("test_write.txt");
    if (QFileInfo::exists(testWritePath))
        return canonicalLocalPath;

    QFile testWriteFile(testWritePath);
    if (testWriteFile.open(QIODeviceBase::ReadWrite)) {
        testWriteFile.close();
        return canonicalLocalPath;
    }
    qWarning() << "ERROR: Local download path appears not writeable:" << canonicalLocalPath;
    return canonicalLocalPath;
}
class MyPrivateSettings: public MySettings { }; class MyPrivateSettings: public MySettings { };
Q_GLOBAL_STATIC(MyPrivateSettings, settingsInstance) Q_GLOBAL_STATIC(MyPrivateSettings, settingsInstance)
@ -12,6 +58,282 @@ MySettings *MySettings::globalInstance()
MySettings::MySettings() MySettings::MySettings()
: QObject{nullptr} : QObject{nullptr}
{ {
QSettings::setDefaultFormat(QSettings::IniFormat);
}
// Reset every generation (sampling) setting to its compiled-in default.
// Each setter persists the value to QSettings and emits its change signal
// only when the value actually changes, so QML bindings refresh automatically.
// Exposed to QML via Q_INVOKABLE (see header).
void MySettings::restoreGenerationDefaults()
{
    setTemperature(default_temperature);
    setTopP(default_topP);
    setTopK(default_topK);
    setMaxLength(default_maxLength);
    setPromptBatchSize(default_promptBatchSize);
    setRepeatPenalty(default_repeatPenalty);
    setRepeatPenaltyTokens(default_repeatPenaltyTokens);
    setPromptTemplate(default_promptTemplate);
}
// Reset every application-level setting to its compiled-in default.
// Model path is recomputed (not a constant) because it depends on the
// platform's writable app-data location. Exposed to QML via Q_INVOKABLE.
void MySettings::restoreApplicationDefaults()
{
    setThreadCount(default_threadCount);
    setSaveChats(default_saveChats);
    setSaveChatGPTChats(default_saveChatGPTChats);
    setServerChat(default_serverChat);
    setModelPath(defaultLocalModelsPath());
    setUserDefaultModel(default_userDefaultModel);
    setForceMetal(default_forceMetal);
}
double MySettings::temperature() const
{
QSettings setting;
setting.sync();
return setting.value("temperature", default_temperature).toDouble();
}
void MySettings::setTemperature(double t)
{
if (temperature() == t)
return;
QSettings setting;
setting.sync();
setting.setValue("temperature", t);
emit temperatureChanged();
}
double MySettings::topP() const
{
QSettings setting;
setting.sync();
return setting.value("topP", default_topP).toDouble();
}
void MySettings::setTopP(double p)
{
if (topP() == p)
return;
QSettings setting;
setting.sync();
setting.setValue("topP", p);
emit topPChanged();
}
int MySettings::topK() const
{
QSettings setting;
setting.sync();
return setting.value("topK", default_topK).toInt();
}
void MySettings::setTopK(int k)
{
if (topK() == k)
return;
QSettings setting;
setting.sync();
setting.setValue("topK", k);
emit topKChanged();
}
int MySettings::maxLength() const
{
QSettings setting;
setting.sync();
return setting.value("maxLength", default_maxLength).toInt();
}
void MySettings::setMaxLength(int l)
{
if (maxLength() == l)
return;
QSettings setting;
setting.sync();
setting.setValue("maxLength", l);
emit maxLengthChanged();
}
int MySettings::promptBatchSize() const
{
QSettings setting;
setting.sync();
return setting.value("promptBatchSize", default_promptBatchSize).toInt();
}
void MySettings::setPromptBatchSize(int s)
{
if (promptBatchSize() == s)
return;
QSettings setting;
setting.sync();
setting.setValue("promptBatchSize", s);
emit promptBatchSizeChanged();
}
double MySettings::repeatPenalty() const
{
QSettings setting;
setting.sync();
return setting.value("repeatPenalty", default_repeatPenalty).toDouble();
}
void MySettings::setRepeatPenalty(double p)
{
if (repeatPenalty() == p)
return;
QSettings setting;
setting.sync();
setting.setValue("repeatPenalty", p);
emit repeatPenaltyChanged();
}
int MySettings::repeatPenaltyTokens() const
{
QSettings setting;
setting.sync();
return setting.value("repeatPenaltyTokens", default_repeatPenaltyTokens).toInt();
}
void MySettings::setRepeatPenaltyTokens(int t)
{
if (repeatPenaltyTokens() == t)
return;
QSettings setting;
setting.sync();
setting.setValue("repeatPenaltyTokens", t);
emit repeatPenaltyTokensChanged();
}
QString MySettings::promptTemplate() const
{
QSettings setting;
setting.sync();
return setting.value("promptTemplate", default_promptTemplate).toString();
}
void MySettings::setPromptTemplate(const QString &t)
{
if (promptTemplate() == t)
return;
QSettings setting;
setting.sync();
setting.setValue("promptTemplate", t);
emit promptTemplateChanged();
}
int MySettings::threadCount() const
{
QSettings setting;
setting.sync();
return setting.value("threadCount", default_threadCount).toInt();
}
void MySettings::setThreadCount(int c)
{
if (threadCount() == c)
return;
QSettings setting;
setting.sync();
setting.setValue("threadCount", c);
emit threadCountChanged();
}
bool MySettings::saveChats() const
{
QSettings setting;
setting.sync();
return setting.value("saveChats", default_saveChats).toBool();
}
void MySettings::setSaveChats(bool b)
{
if (saveChats() == b)
return;
QSettings setting;
setting.sync();
setting.setValue("saveChats", b);
emit saveChatsChanged();
}
bool MySettings::saveChatGPTChats() const
{
QSettings setting;
setting.sync();
return setting.value("saveChatGPTChats", default_saveChatGPTChats).toBool();
}
void MySettings::setSaveChatGPTChats(bool b)
{
if (saveChatGPTChats() == b)
return;
QSettings setting;
setting.sync();
setting.setValue("saveChatGPTChats", b);
emit saveChatGPTChatsChanged();
}
bool MySettings::serverChat() const
{
QSettings setting;
setting.sync();
return setting.value("serverChat", default_serverChat).toBool();
}
void MySettings::setServerChat(bool b)
{
if (serverChat() == b)
return;
QSettings setting;
setting.sync();
setting.setValue("serverChat", b);
emit serverChatChanged();
}
QString MySettings::modelPath() const
{
QSettings setting;
setting.sync();
return setting.value("modelPath", defaultLocalModelsPath()).toString();
}
void MySettings::setModelPath(const QString &p)
{
if (modelPath() == p)
return;
QSettings setting;
setting.sync();
setting.setValue("modelPath", p);
emit modelPathChanged();
}
QString MySettings::userDefaultModel() const
{
QSettings setting;
setting.sync();
return setting.value("userDefaultModel", default_userDefaultModel).toString();
}
void MySettings::setUserDefaultModel(const QString &u)
{
if (userDefaultModel() == u)
return;
QSettings setting;
setting.sync();
setting.setValue("userDefaultModel", u);
emit userDefaultModelChanged();
} }
bool MySettings::forceMetal() const bool MySettings::forceMetal() const
@ -19,10 +341,10 @@ bool MySettings::forceMetal() const
return m_forceMetal; return m_forceMetal;
} }
void MySettings::setForceMetal(bool enabled) void MySettings::setForceMetal(bool b)
{ {
if (m_forceMetal == enabled) if (m_forceMetal == b)
return; return;
m_forceMetal = enabled; m_forceMetal = b;
emit forceMetalChanged(enabled); emit forceMetalChanged(b);
} }

View File

@ -7,15 +7,78 @@
class MySettings : public QObject class MySettings : public QObject
{ {
Q_OBJECT Q_OBJECT
Q_PROPERTY(double temperature READ temperature WRITE setTemperature NOTIFY temperatureChanged)
Q_PROPERTY(double topP READ topP WRITE setTopP NOTIFY topPChanged)
Q_PROPERTY(int topK READ topK WRITE setTopK NOTIFY topKChanged)
Q_PROPERTY(int maxLength READ maxLength WRITE setMaxLength NOTIFY maxLengthChanged)
Q_PROPERTY(int promptBatchSize READ promptBatchSize WRITE setPromptBatchSize NOTIFY promptBatchSizeChanged)
Q_PROPERTY(double repeatPenalty READ repeatPenalty WRITE setRepeatPenalty NOTIFY repeatPenaltyChanged)
Q_PROPERTY(int repeatPenaltyTokens READ repeatPenaltyTokens WRITE setRepeatPenaltyTokens NOTIFY repeatPenaltyTokensChanged)
Q_PROPERTY(QString promptTemplate READ promptTemplate WRITE setPromptTemplate NOTIFY promptTemplateChanged)
Q_PROPERTY(int threadCount READ threadCount WRITE setThreadCount NOTIFY threadCountChanged)
Q_PROPERTY(bool saveChats READ saveChats WRITE setSaveChats NOTIFY saveChatsChanged)
Q_PROPERTY(bool saveChatGPTChats READ saveChatGPTChats WRITE setSaveChatGPTChats NOTIFY saveChatGPTChatsChanged)
Q_PROPERTY(bool serverChat READ serverChat WRITE setServerChat NOTIFY serverChatChanged)
Q_PROPERTY(QString modelPath READ modelPath WRITE setModelPath NOTIFY modelPathChanged)
Q_PROPERTY(QString userDefaultModel READ userDefaultModel WRITE setUserDefaultModel NOTIFY userDefaultModelChanged)
Q_PROPERTY(bool forceMetal READ forceMetal WRITE setForceMetal NOTIFY forceMetalChanged) Q_PROPERTY(bool forceMetal READ forceMetal WRITE setForceMetal NOTIFY forceMetalChanged)
public: public:
static MySettings *globalInstance(); static MySettings *globalInstance();
// Restore methods
Q_INVOKABLE void restoreGenerationDefaults();
Q_INVOKABLE void restoreApplicationDefaults();
// Generation settings
double temperature() const;
void setTemperature(double t);
double topP() const;
void setTopP(double p);
int topK() const;
void setTopK(int k);
int maxLength() const;
void setMaxLength(int l);
int promptBatchSize() const;
void setPromptBatchSize(int s);
double repeatPenalty() const;
void setRepeatPenalty(double p);
int repeatPenaltyTokens() const;
void setRepeatPenaltyTokens(int t);
QString promptTemplate() const;
void setPromptTemplate(const QString &t);
// Application settings
int threadCount() const;
void setThreadCount(int c);
bool saveChats() const;
void setSaveChats(bool b);
bool saveChatGPTChats() const;
void setSaveChatGPTChats(bool b);
bool serverChat() const;
void setServerChat(bool b);
QString modelPath() const;
void setModelPath(const QString &p);
QString userDefaultModel() const;
void setUserDefaultModel(const QString &u);
bool forceMetal() const; bool forceMetal() const;
void setForceMetal(bool enabled); void setForceMetal(bool b);
Q_SIGNALS: Q_SIGNALS:
void temperatureChanged();
void topPChanged();
void topKChanged();
void maxLengthChanged();
void promptBatchSizeChanged();
void repeatPenaltyChanged();
void repeatPenaltyTokensChanged();
void promptTemplateChanged();
void threadCountChanged();
void saveChatsChanged();
void saveChatGPTChatsChanged();
void serverChatChanged();
void modelPathChanged();
void userDefaultModelChanged();
void forceMetalChanged(bool); void forceMetalChanged(bool);
private: private:

View File

@ -36,98 +36,27 @@ Dialog {
id: theme id: theme
} }
property real defaultTemperature: 0.7
property real defaultTopP: 0.1
property int defaultTopK: 40
property int defaultMaxLength: 4096
property int defaultPromptBatchSize: 128
property real defaultRepeatPenalty: 1.18
property int defaultRepeatPenaltyTokens: 64
property int defaultThreadCount: 0
property bool defaultSaveChats: false
property bool defaultSaveChatGPTChats: true
property bool defaultServerChat: false
property string defaultPromptTemplate: "### Human:
%1
### Assistant:\n"
property string defaultModelPath: ModelList.defaultLocalModelsPath()
property string defaultUserDefaultModel: "Application default"
property alias temperature: settings.temperature
property alias topP: settings.topP
property alias topK: settings.topK
property alias maxLength: settings.maxLength
property alias promptBatchSize: settings.promptBatchSize
property alias promptTemplate: settings.promptTemplate
property alias repeatPenalty: settings.repeatPenalty
property alias repeatPenaltyTokens: settings.repeatPenaltyTokens
property alias threadCount: settings.threadCount
property alias saveChats: settings.saveChats
property alias saveChatGPTChats: settings.saveChatGPTChats
property alias serverChat: settings.serverChat
property alias modelPath: settings.modelPath
property alias userDefaultModel: settings.userDefaultModel
Settings {
id: settings
property real temperature: settingsDialog.defaultTemperature
property real topP: settingsDialog.defaultTopP
property int topK: settingsDialog.defaultTopK
property int maxLength: settingsDialog.defaultMaxLength
property int promptBatchSize: settingsDialog.defaultPromptBatchSize
property int threadCount: settingsDialog.defaultThreadCount
property bool saveChats: settingsDialog.defaultSaveChats
property bool saveChatGPTChats: settingsDialog.defaultSaveChatGPTChats
property bool serverChat: settingsDialog.defaultServerChat
property real repeatPenalty: settingsDialog.defaultRepeatPenalty
property int repeatPenaltyTokens: settingsDialog.defaultRepeatPenaltyTokens
property string promptTemplate: settingsDialog.defaultPromptTemplate
property string modelPath: settingsDialog.defaultModelPath
property string userDefaultModel: settingsDialog.defaultUserDefaultModel
}
function restoreGenerationDefaults() { function restoreGenerationDefaults() {
settings.temperature = defaultTemperature MySettings.restoreGenerationDefaults();
settings.topP = defaultTopP templateTextArea.text = MySettings.promptTemplate
settings.topK = defaultTopK
settings.maxLength = defaultMaxLength
settings.promptBatchSize = defaultPromptBatchSize
settings.promptTemplate = defaultPromptTemplate
templateTextArea.text = defaultPromptTemplate
settings.repeatPenalty = defaultRepeatPenalty
settings.repeatPenaltyTokens = defaultRepeatPenaltyTokens
settings.sync()
} }
function restoreApplicationDefaults() { function restoreApplicationDefaults() {
settings.modelPath = settingsDialog.defaultModelPath MySettings.restoreApplicationDefaults();
settings.threadCount = defaultThreadCount ModelList.localModelsPath = MySettings.modelPath
settings.saveChats = defaultSaveChats LLM.threadCount = MySettings.threadCount
settings.saveChatGPTChats = defaultSaveChatGPTChats LLM.serverEnabled = MySettings.serverChat
settings.serverChat = defaultServerChat ChatListModel.shouldSaveChats = MySettings.saveChats
settings.userDefaultModel = defaultUserDefaultModel ChatListModel.shouldSaveChatGPTChats = MySettings.saveChatGPTChats
ModelList.localModelsPath = settings.modelPath
LLM.threadCount = settings.threadCount
LLM.serverEnabled = settings.serverChat
ChatListModel.shouldSaveChats = settings.saveChats
ChatListModel.shouldSaveChatGPTChats = settings.saveChatGPTChats
MySettings.forceMetal = false MySettings.forceMetal = false
settings.sync()
} }
Component.onCompleted: { Component.onCompleted: {
LLM.threadCount = settings.threadCount LLM.threadCount = MySettings.threadCount
LLM.serverEnabled = settings.serverChat LLM.serverEnabled = MySettings.serverChat
ChatListModel.shouldSaveChats = settings.saveChats ChatListModel.shouldSaveChats = MySettings.saveChats
ChatListModel.shouldSaveChatGPTChats = settings.saveChatGPTChats ChatListModel.shouldSaveChatGPTChats = MySettings.saveChatGPTChats
ModelList.localModelsPath = settings.modelPath ModelList.localModelsPath = MySettings.modelPath
}
Connections {
target: settingsDialog
function onClosed() {
settings.sync()
}
} }
Item { Item {
@ -309,7 +238,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.temperature.toString() text: MySettings.temperature
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Temperature increases the chances of choosing less likely tokens.\nNOTE: Higher temperature gives more creative but less predictable outputs.") ToolTip.text: qsTr("Temperature increases the chances of choosing less likely tokens.\nNOTE: Higher temperature gives more creative but less predictable outputs.")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -321,11 +250,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseFloat(text) var val = parseFloat(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.temperature = val MySettings.temperature = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.temperature.toString() text = MySettings.temperature
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -340,7 +268,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.topP.toString() text: MySettings.topP
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Only the most likely tokens up to a total probability of top_p can be chosen.\nNOTE: Prevents choosing highly unlikely tokens, aka Nucleus Sampling") ToolTip.text: qsTr("Only the most likely tokens up to a total probability of top_p can be chosen.\nNOTE: Prevents choosing highly unlikely tokens, aka Nucleus Sampling")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -352,11 +280,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseFloat(text) var val = parseFloat(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.topP = val MySettings.topP = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.topP.toString() text = MySettings.topP
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -371,7 +298,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.topK.toString() text: MySettings.topK
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Only the top K most likely tokens will be chosen from") ToolTip.text: qsTr("Only the top K most likely tokens will be chosen from")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -383,11 +310,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseInt(text) var val = parseInt(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.topK = val MySettings.topK = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.topK.toString() text = MySettings.topK
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -402,7 +328,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.maxLength.toString() text: MySettings.maxLength
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Maximum length of response in tokens") ToolTip.text: qsTr("Maximum length of response in tokens")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -414,11 +340,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseInt(text) var val = parseInt(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.maxLength = val MySettings.maxLength = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.maxLength.toString() text = MySettings.maxLength
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -434,7 +359,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.promptBatchSize.toString() text: MySettings.promptBatchSize
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Amount of prompt tokens to process at once.\nNOTE: Higher values can speed up reading prompts but will use more RAM") ToolTip.text: qsTr("Amount of prompt tokens to process at once.\nNOTE: Higher values can speed up reading prompts but will use more RAM")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -446,11 +371,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseInt(text) var val = parseInt(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.promptBatchSize = val MySettings.promptBatchSize = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.promptBatchSize.toString() text = MySettings.promptBatchSize
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -465,7 +389,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.repeatPenalty.toString() text: MySettings.repeatPenalty
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Amount to penalize repetitiveness of the output") ToolTip.text: qsTr("Amount to penalize repetitiveness of the output")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -477,11 +401,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseFloat(text) var val = parseFloat(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.repeatPenalty = val MySettings.repeatPenalty = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.repeatPenalty.toString() text = MySettings.repeatPenalty
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -496,7 +419,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settings.repeatPenaltyTokens.toString() text: MySettings.repeatPenaltyTokens
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("How far back in output to apply repeat penalty") ToolTip.text: qsTr("How far back in output to apply repeat penalty")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -508,11 +431,10 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseInt(text) var val = parseInt(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settings.repeatPenaltyTokens = val MySettings.repeatPenaltyTokens = val
settings.sync()
focus = false focus = false
} else { } else {
text = settings.repeatPenaltyTokens.toString() text = MySettings.repeatPenaltyTokens
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -558,7 +480,7 @@ Dialog {
anchors.fill: parent anchors.fill: parent
TextArea { TextArea {
id: templateTextArea id: templateTextArea
text: settings.promptTemplate text: MySettings.promptTemplate
color: theme.textColor color: theme.textColor
background: Rectangle { background: Rectangle {
implicitWidth: 150 implicitWidth: 150
@ -569,8 +491,7 @@ Dialog {
wrapMode: TextArea.Wrap wrapMode: TextArea.Wrap
onTextChanged: { onTextChanged: {
if (templateTextArea.text.indexOf("%1") !== -1) { if (templateTextArea.text.indexOf("%1") !== -1) {
settings.promptTemplate = text MySettings.promptTemplate = text
settings.sync()
} }
} }
bottomPadding: 10 bottomPadding: 10
@ -633,21 +554,19 @@ Dialog {
Accessible.name: qsTr("ComboBox for displaying/picking the default model") Accessible.name: qsTr("ComboBox for displaying/picking the default model")
Accessible.description: qsTr("Use this for picking the default model to use; the first item is the current default model") Accessible.description: qsTr("Use this for picking the default model to use; the first item is the current default model")
function updateModel() { function updateModel() {
settings.sync(); comboBox.currentIndex = comboBox.indexOfValue(MySettings.userDefaultModel);
comboBox.currentIndex = comboBox.indexOfValue(settingsDialog.userDefaultModel);
} }
Component.onCompleted: { Component.onCompleted: {
comboBox.updateModel() comboBox.updateModel()
} }
Connections { Connections {
target: settings target: MySettings
function onUserDefaultModelChanged() { function onUserDefaultModelChanged() {
comboBox.updateModel() comboBox.updateModel()
} }
} }
onActivated: { onActivated: {
settingsDialog.userDefaultModel = comboBox.currentText MySettings.userDefaultModel = comboBox.currentText
settings.sync()
} }
} }
FolderDialog { FolderDialog {
@ -657,8 +576,7 @@ Dialog {
onAccepted: { onAccepted: {
modelPathDisplayField.text = selectedFolder modelPathDisplayField.text = selectedFolder
ModelList.localModelsPath = modelPathDisplayField.text ModelList.localModelsPath = modelPathDisplayField.text
settings.modelPath = ModelList.localModelsPath MySettings.modelPath = ModelList.localModelsPath
settings.sync()
} }
} }
Label { Label {
@ -683,8 +601,7 @@ Dialog {
onEditingFinished: { onEditingFinished: {
if (isValid) { if (isValid) {
ModelList.localModelsPath = modelPathDisplayField.text ModelList.localModelsPath = modelPathDisplayField.text
settings.modelPath = ModelList.localModelsPath MySettings.modelPath = ModelList.localModelsPath
settings.sync()
} else { } else {
text = ModelList.localModelsPath text = ModelList.localModelsPath
} }
@ -705,7 +622,7 @@ Dialog {
Layout.column: 0 Layout.column: 0
} }
MyTextField { MyTextField {
text: settingsDialog.threadCount.toString() text: MySettings.threadCount
color: theme.textColor color: theme.textColor
ToolTip.text: qsTr("Amount of processing threads to use, a setting of 0 will use the lesser of 4 or your number of CPU threads") ToolTip.text: qsTr("Amount of processing threads to use, a setting of 0 will use the lesser of 4 or your number of CPU threads")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -717,12 +634,11 @@ Dialog {
onEditingFinished: { onEditingFinished: {
var val = parseInt(text) var val = parseInt(text)
if (!isNaN(val)) { if (!isNaN(val)) {
settingsDialog.threadCount = val MySettings.threadCount = val
LLM.threadCount = val LLM.threadCount = val
settings.sync()
focus = false focus = false
} else { } else {
text = settingsDialog.threadCount.toString() text = MySettings.threadCount
} }
} }
Accessible.role: Accessible.EditableText Accessible.role: Accessible.EditableText
@ -740,12 +656,11 @@ Dialog {
id: saveChatsBox id: saveChatsBox
Layout.row: 4 Layout.row: 4
Layout.column: 1 Layout.column: 1
checked: settingsDialog.saveChats checked: MySettings.saveChats
onClicked: { onClicked: {
Network.sendSaveChatsToggled(saveChatsBox.checked); Network.sendSaveChatsToggled(saveChatsBox.checked);
settingsDialog.saveChats = saveChatsBox.checked MySettings.saveChats = !MySettings.saveChats
ChatListModel.shouldSaveChats = saveChatsBox.checked ChatListModel.shouldSaveChats = saveChatsBox.checked
settings.sync()
} }
ToolTip.text: qsTr("WARNING: Saving chats to disk can be ~2GB per chat") ToolTip.text: qsTr("WARNING: Saving chats to disk can be ~2GB per chat")
ToolTip.visible: hovered ToolTip.visible: hovered
@ -761,11 +676,10 @@ Dialog {
id: saveChatGPTChatsBox id: saveChatGPTChatsBox
Layout.row: 5 Layout.row: 5
Layout.column: 1 Layout.column: 1
checked: settingsDialog.saveChatGPTChats checked: MySettings.saveChatGPTChats
onClicked: { onClicked: {
settingsDialog.saveChatGPTChats = saveChatGPTChatsBox.checked MySettings.saveChatGPTChats = !MySettings.saveChatGPTChats
ChatListModel.shouldSaveChatGPTChats = saveChatGPTChatsBox.checked ChatListModel.shouldSaveChatGPTChats = saveChatGPTChatsBox.checked
settings.sync()
} }
} }
Label { Label {
@ -779,11 +693,10 @@ Dialog {
id: serverChatBox id: serverChatBox
Layout.row: 6 Layout.row: 6
Layout.column: 1 Layout.column: 1
checked: settings.serverChat checked: MySettings.serverChat
onClicked: { onClicked: {
settingsDialog.serverChat = serverChatBox.checked MySettings.serverChat = !MySettings.serverChat
LLM.serverEnabled = serverChatBox.checked LLM.serverEnabled = serverChatBox.checked
settings.sync()
} }
ToolTip.text: qsTr("WARNING: This enables the gui to act as a local REST web server(OpenAI API compliant) for API requests and will increase your RAM usage as well") ToolTip.text: qsTr("WARNING: This enables the gui to act as a local REST web server(OpenAI API compliant) for API requests and will increase your RAM usage as well")
ToolTip.visible: hovered ToolTip.visible: hovered