Mirror of https://github.com/nomic-ai/gpt4all.git
Provide convenience methods for adding/removing/changing chat.
parent 8f80f8e3a2
commit 679b61ee07
llm.cpp (101 lines changed)
@@ -21,22 +21,16 @@ LLM *LLM::globalInstance()
 
 LLM::LLM()
     : QObject{nullptr}
-    , m_currentChat(new Chat)
 {
+    if (m_chats.isEmpty())
+        addChat();
     connect(Download::globalInstance(), &Download::modelListChanged,
         this, &LLM::modelListChanged, Qt::QueuedConnection);
-    // FIXME: These should be moved to connect whenever we make a new chat object in future
-    connect(m_currentChat, &Chat::modelNameChanged,
-        this, &LLM::modelListChanged, Qt::QueuedConnection);
-    connect(m_currentChat, &Chat::recalcChanged,
-        this, &LLM::recalcChanged, Qt::QueuedConnection);
-    connect(m_currentChat, &Chat::responseChanged,
-        this, &LLM::responseChanged, Qt::QueuedConnection);
 }
 
 QList<QString> LLM::modelList() const
 {
-    Q_ASSERT(m_currentChat);
+    Q_ASSERT(currentChat());
     // Build a model list from exepath and from the localpath
     QList<QString> list;
 
@@ -52,7 +46,7 @@ QList<QString> LLM::modelList() const
         QFileInfo info(filePath);
         QString name = info.completeBaseName().remove(0, 5);
         if (info.exists()) {
-            if (name == m_currentChat->modelName())
+            if (name == currentChat()->modelName())
                 list.prepend(name);
             else
                 list.append(name);
@@ -69,7 +63,7 @@ QList<QString> LLM::modelList() const
         QFileInfo info(filePath);
         QString name = info.completeBaseName().remove(0, 5);
         if (info.exists() && !list.contains(name)) { // don't allow duplicates
-            if (name == m_currentChat->modelName())
+            if (name == currentChat()->modelName())
                 list.prepend(name);
             else
                 list.append(name);
@@ -115,7 +109,88 @@ bool LLM::checkForUpdates() const
 
 bool LLM::isRecalc() const
 {
-    Q_ASSERT(m_currentChat);
-    return m_currentChat->isRecalc();
+    Q_ASSERT(currentChat());
+    return currentChat()->isRecalc();
 }
 
+Chat *LLM::currentChat() const
+{
+    return chatFromId(m_currentChat);
+}
+
+QList<QString> LLM::chatList() const
+{
+    return m_chats.keys();
+}
+
+QString LLM::addChat()
+{
+    Chat *newChat = new Chat(this);
+    m_chats.insert(newChat->id(), newChat);
+    emit chatListChanged();
+    setCurrentChatFromId(newChat->id());
+    return newChat->id();
+}
+
+void LLM::removeChat(const QString &id)
+{
+    if (!m_chats.contains(id)) {
+        qDebug() << "WARNING: Removing chat with id" << id;
+        return;
+    }
+
+    const bool chatIsCurrent = id == m_currentChat;
+    Chat *chat = m_chats.value(id);
+    disconnectChat(chat);
+    m_chats.remove(id);
+    emit chatListChanged();
+    delete chat;
+    if (m_chats.isEmpty())
+        addChat();
+    else
+        setCurrentChatFromId(chatList().first());
+}
+
+Chat *LLM::chatFromId(const QString &id) const
+{
+    if (!m_chats.contains(id)) {
+        qDebug() << "WARNING: Getting chat from id" << id;
+        return nullptr;
+    }
+    return m_chats.value(id);
+}
+
+void LLM::setCurrentChatFromId(const QString &id)
+{
+    if (!m_chats.contains(id)) {
+        qDebug() << "ERROR: Setting current chat from id" << id;
+        return;
+    }
+
+    // On load this can be empty as we add a new chat in ctor this method will be called
+    if (!m_currentChat.isEmpty()) {
+        Chat *curr = currentChat();
+        Q_ASSERT(curr);
+        disconnect(curr);
+    }
+
+    Chat *newCurr = m_chats.value(id);
+    connectChat(newCurr);
+    m_currentChat = id;
+    emit currentChatChanged();
+}
+
+void LLM::connectChat(Chat *chat)
+{
+    connect(chat, &Chat::modelNameChanged,
+        this, &LLM::modelListChanged, Qt::QueuedConnection);
+    connect(chat, &Chat::recalcChanged,
+        this, &LLM::recalcChanged, Qt::QueuedConnection);
+    connect(chat, &Chat::responseChanged,
+        this, &LLM::responseChanged, Qt::QueuedConnection);
+}
+
+void LLM::disconnectChat(Chat *chat)
+{
+    disconnect(chat);
+}
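Note on the pattern above (not part of the commit): the single Chat *m_currentChat member is replaced by a QString id plus a QMap<QString, Chat*> registry, and only the current chat has its signals forwarded to LLM; switching chats disconnects the old one and connects the new one. The following is a minimal, self-contained sketch of that registry idea under hypothetical names (Item, Manager, currentItemChanged), not gpt4all code, and like any QObject it still needs moc processing to build.

// Illustrative sketch only: an id-keyed registry where just the current
// item's signal is forwarded to the owning manager (hypothetical names).
#include <QMap>
#include <QObject>
#include <QString>
#include <QUuid>

class Item : public QObject {
    Q_OBJECT
public:
    explicit Item(QObject *parent = nullptr)
        : QObject(parent), m_id(QUuid::createUuid().toString()) {}
    QString id() const { return m_id; }
signals:
    void changed();
private:
    QString m_id;
};

class Manager : public QObject {
    Q_OBJECT
public:
    QString addItem() {
        Item *item = new Item(this);
        m_items.insert(item->id(), item);
        setCurrentFromId(item->id());   // a newly added item becomes current
        return item->id();
    }
    void setCurrentFromId(const QString &id) {
        if (!m_items.contains(id))
            return;
        if (!m_currentId.isEmpty())     // stop forwarding from the old current item
            disconnect(m_items.value(m_currentId), &Item::changed,
                       this, &Manager::currentItemChanged);
        connect(m_items.value(id), &Item::changed,
                this, &Manager::currentItemChanged);
        m_currentId = id;
    }
signals:
    void currentItemChanged();
private:
    QString m_currentId;
    QMap<QString, Item*> m_items;
};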
llm.h (21 lines changed)
@@ -9,26 +9,39 @@ class LLM : public QObject
 {
     Q_OBJECT
     Q_PROPERTY(QList<QString> modelList READ modelList NOTIFY modelListChanged)
-    Q_PROPERTY(Chat *currentChat READ currentChat NOTIFY currentChatChanged)
     Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
+    Q_PROPERTY(Chat *currentChat READ currentChat NOTIFY currentChatChanged)
+    Q_PROPERTY(QList<QString> chatList READ chatList NOTIFY chatListChanged)
 
 public:
 
     static LLM *globalInstance();
 
     QList<QString> modelList() const;
-    Q_INVOKABLE bool checkForUpdates() const;
-    Chat *currentChat() const { return m_currentChat; }
     bool isRecalc() const;
+    Chat *currentChat() const;
+    QList<QString> chatList() const;
+
+    Q_INVOKABLE QString addChat();
+    Q_INVOKABLE void removeChat(const QString &id);
+    Q_INVOKABLE Chat *chatFromId(const QString &id) const;
+    Q_INVOKABLE void setCurrentChatFromId(const QString &id);
+    Q_INVOKABLE bool checkForUpdates() const;
 
 Q_SIGNALS:
     void modelListChanged();
     void currentChatChanged();
     void recalcChanged();
+    void chatListChanged();
     void responseChanged();
 
 private:
-    Chat *m_currentChat;
+    void connectChat(Chat *chat);
+    void disconnectChat(Chat *chat);
+
+private:
+    QString m_currentChat;
+    QMap<QString, Chat*> m_chats;
 
 private:
     explicit LLM();
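Because the new chat methods are declared Q_INVOKABLE, they can be called from QML as well as from C++. A hypothetical C++ caller, relying only on the declarations shown above, might exercise them like this; the function name cycleChats is illustrative.

// Hypothetical caller; uses only the llm.h API shown above.
#include "llm.h"

void cycleChats()
{
    LLM *llm = LLM::globalInstance();

    // addChat() creates a chat, makes it current, and returns its id.
    const QString newId = llm->addChat();

    // Switch back to the first chat in the list, if it is a different one.
    const QList<QString> ids = llm->chatList();
    if (!ids.isEmpty() && ids.first() != newId)
        llm->setCurrentChatFromId(ids.first());

    // removeChat() re-creates an empty chat if the map would become empty;
    // otherwise the first id in chatList() becomes the current chat.
    llm->removeChat(newId);
}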