gpt4all/gpt4all-chat/main.cpp

#include "chatlistmodel.h"
#include "config.h"
#include "download.h"
#include "llm.h"
#include "localdocs.h"
#include "logger.h"
#include "modellist.h"
#include "mysettings.h"
#include "network.h"
#include "../gpt4all-backend/llmodel.h"
2023-04-08 23:28:39 -04:00
#include <QCoreApplication>
#include <QGuiApplication>
#include <QObject>
#include <QQmlApplicationEngine>
#include <QQmlEngine>
#include <QSettings>
#include <QString>
#include <QTranslator>
#include <QUrl>
#include <Qt>

int main(int argc, char *argv[])
{
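    // Application identity; QSettings uses these values to locate the app's settings file.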
    QCoreApplication::setOrganizationName("nomic.ai");
    QCoreApplication::setOrganizationDomain("gpt4all.io");
    QCoreApplication::setApplicationName("GPT4All");
    QCoreApplication::setApplicationVersion(APP_VERSION);
    QSettings::setDefaultFormat(QSettings::IniFormat);

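    // Initialize the logging singleton before anything else that might emit messages.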
    Logger::globalInstance();

    QGuiApplication app(argc, argv);

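    // Install the English translation bundled into the application resources.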
    QTranslator translator;
    const bool success = translator.load(":/i18n/gpt4all_en.qm");
    Q_ASSERT(success);
    Q_UNUSED(success) // Q_ASSERT compiles to nothing in release builds
    app.installTranslator(&translator);

    QQmlApplicationEngine engine;

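    // Build the search path for the llmodel backend implementations: next to the
    // executable, in ../lib, and (on macOS) beside or inside the app bundle.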
    QString llmodelSearchPaths = QCoreApplication::applicationDirPath();
    const QString libDir = QCoreApplication::applicationDirPath() + "/../lib/";
    if (LLM::directoryExists(libDir))
        llmodelSearchPaths += ";" + libDir;
#if defined(Q_OS_MAC)
    const QString binDir = QCoreApplication::applicationDirPath() + "/../../../";
    if (LLM::directoryExists(binDir))
        llmodelSearchPaths += ";" + binDir;
    const QString frameworksDir = QCoreApplication::applicationDirPath() + "/../Frameworks/";
    if (LLM::directoryExists(frameworksDir))
        llmodelSearchPaths += ";" + frameworksDir;
#endif
    LLModel::Implementation::setImplementationsSearchPath(llmodelSearchPaths.toStdString());

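    // Expose the application singletons to QML, each under its own import URI.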
    qmlRegisterSingletonInstance("mysettings", 1, 0, "MySettings", MySettings::globalInstance());
    qmlRegisterSingletonInstance("modellist", 1, 0, "ModelList", ModelList::globalInstance());
    qmlRegisterSingletonInstance("chatlistmodel", 1, 0, "ChatListModel", ChatListModel::globalInstance());
    qmlRegisterSingletonInstance("llm", 1, 0, "LLM", LLM::globalInstance());
    qmlRegisterSingletonInstance("download", 1, 0, "Download", Download::globalInstance());
    qmlRegisterSingletonInstance("network", 1, 0, "Network", Network::globalInstance());
    qmlRegisterSingletonInstance("localdocs", 1, 0, "LocalDocs", LocalDocs::globalInstance());

    const QUrl url(u"qrc:/gpt4all/main.qml"_qs);
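    // Quit with a nonzero exit code if the root QML component fails to load.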
    QObject::connect(&engine, &QQmlApplicationEngine::objectCreated,
                     &app, [url](QObject *obj, const QUrl &objUrl) {
                         if (!obj && url == objUrl)
                             QCoreApplication::exit(-1);
                     }, Qt::QueuedConnection);
    engine.load(url);

#if 0
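    // Debug helper (disabled): dump every file compiled into the Qt resource system.
    // Enabling it also requires #include <QDirIterator> and #include <QDebug>.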
    QDirIterator it("qrc:", QDirIterator::Subdirectories);
    while (it.hasNext()) {
        qDebug() << it.next();
    }
#endif

    int res = app.exec();

    // Make sure ChatLLM threads are joined before global destructors run.
    // Otherwise, we can get a heap-use-after-free inside of llama.cpp.
    ChatListModel::globalInstance()->destroyChats();

    return res;
}