mirror of https://github.com/nomic-ai/gpt4all.git (synced 2024-10-01 01:06:10 -04:00)
Turn the chat list into a model.
This commit is contained in:
parent 679b61ee07
commit a48226613c
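In short, the commit replaces LLM's own chat bookkeeping (a QMap of chats keyed by id plus a current-chat id string) with a dedicated ChatListModel, a QAbstractListModel over Chat pointers that QML can bind to directly. A condensed sketch of the member-level change, using hypothetical *_Before/*_After names purely for illustration (the real classes appear in the diff below):

#include <QObject>
#include <QMap>
#include <QString>

class Chat;           // one conversation (existing project class)
class ChatListModel;  // new in this commit: a QAbstractListModel over Chat*

// Before: LLM tracked chats itself, keyed by chat id, and exposed
// addChat/removeChat/currentChat/chatList straight to QML.
class LLM_Before : public QObject {
    QString m_currentChat;         // id of the current chat
    QMap<QString, Chat*> m_chats;  // id -> Chat
};

// After: the list model owns the chats and the notion of "current";
// LLM only exposes the model (reachable from QML as LLM.chatListModel).
class LLM_After : public QObject {
    ChatListModel *m_chatListModel;
};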
@@ -58,8 +58,9 @@ set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
qt_add_executable(chat
    main.cpp
-   chat.h chat.cpp chatmodel.h
+   chat.h chat.cpp
    chatllm.h chatllm.cpp
+   chatmodel.h chatlistmodel.h
    download.h download.cpp
    network.h network.cpp
    llm.h llm.cpp
chatlistmodel.h (new file, 129 lines)
@@ -0,0 +1,129 @@
#ifndef CHATLISTMODEL_H
#define CHATLISTMODEL_H

#include <QAbstractListModel>
#include "chat.h"

class ChatListModel : public QAbstractListModel
{
    Q_OBJECT
    Q_PROPERTY(int count READ count NOTIFY countChanged)
    Q_PROPERTY(Chat *currentChat READ currentChat WRITE setCurrentChat NOTIFY currentChatChanged)

public:
    explicit ChatListModel(QObject *parent = nullptr)
        : QAbstractListModel(parent)
    {
        if (m_chats.isEmpty())
            addChat();
    }

    enum Roles {
        IdRole = Qt::UserRole + 1,
        NameRole
    };

    int rowCount(const QModelIndex &parent = QModelIndex()) const override
    {
        Q_UNUSED(parent)
        return m_chats.size();
    }

    QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override
    {
        if (!index.isValid() || index.row() < 0 || index.row() >= m_chats.size())
            return QVariant();

        const Chat *item = m_chats.at(index.row());
        switch (role) {
        case IdRole:
            return item->id();
        case NameRole:
            return item->name();
        }

        return QVariant();
    }

    QHash<int, QByteArray> roleNames() const override
    {
        QHash<int, QByteArray> roles;
        roles[IdRole] = "id";
        roles[NameRole] = "name";
        return roles;
    }

    Q_INVOKABLE Chat* addChat()
    {
        Chat *newChat = new Chat(this);
        beginInsertRows(QModelIndex(), m_chats.size(), m_chats.size());
        m_chats.append(newChat);
        endInsertRows();
        emit countChanged();
        setCurrentChat(newChat);
        return newChat;
    }

    Q_INVOKABLE void removeChat(Chat* chat)
    {
        if (!m_chats.contains(chat)) {
            qDebug() << "WARNING: Removing chat failed with id" << chat->id();
            return;
        }

        const bool chatIsCurrent = chat == m_currentChat;
        emit disconnectChat(chat);
        const int index = m_chats.indexOf(chat);
        beginRemoveRows(QModelIndex(), index, index);
        m_chats.removeAll(chat);
        endRemoveRows();
        delete chat;
        if (m_chats.isEmpty())
            addChat();
        else
            setCurrentChat(m_chats.first());
    }

    Chat *currentChat() const
    {
        return m_currentChat;
    }

    void setCurrentChat(Chat *chat)
    {
        if (!m_chats.contains(chat)) {
            qDebug() << "ERROR: Setting current chat failed with id" << chat->id();
            return;
        }

        if (m_currentChat) {
            Q_ASSERT(m_currentChat);
            emit disconnect(m_currentChat);
        }

        emit connectChat(chat);
        m_currentChat = chat;
        emit currentChatChanged();
    }

    Q_INVOKABLE Chat* get(int index)
    {
        if (index < 0 || index >= m_chats.size()) return nullptr;
        return m_chats.at(index);
    }


    int count() const { return m_chats.size(); }

Q_SIGNALS:
    void countChanged();
    void connectChat(Chat *);
    void disconnectChat(Chat *);
    void currentChatChanged();

private:
    Chat* m_currentChat;
    QList<Chat*> m_chats;
};

#endif // CHATITEMMODEL_H
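For orientation, here is a minimal sketch of how the class above behaves on its own. It is a hypothetical driver, not part of the commit, and it only calls members declared in the header; the project's Chat class is assumed to provide the id()/name() accessors the model already relies on. Note that the header leaves m_currentChat uninitialized, so a standalone exercise like this would want it set to nullptr first.

#include <QCoreApplication>
#include <QDebug>
#include "chatlistmodel.h"

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    ChatListModel chats;             // the ctor already adds one chat and makes it current
    Chat *second = chats.addChat();  // inserts a row, bumps count, becomes current

    qDebug() << "count:" << chats.count();                            // 2
    qDebug() << "name of row 1:"
             << chats.data(chats.index(1), ChatListModel::NameRole);  // "name" role of the new chat

    chats.removeChat(second);        // removes the row and falls back to the first chat
    qDebug() << "count after remove:" << chats.count();               // 1
    qDebug() << "current:" << chats.currentChat()->name();
    return 0;
}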
@@ -16,7 +16,6 @@ class ChatLLM : public QObject
    Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)

public:
-
    ChatLLM();

    bool isModelLoaded() const;
llm.cpp (87 changed lines)
@@ -1,8 +1,6 @@
#include "llm.h"
#include "download.h"
#include "network.h"
#include "llmodel/gptj.h"
#include "llmodel/llamamodel.h"

#include <QCoreApplication>
#include <QDir>
@@ -21,16 +19,20 @@ LLM *LLM::globalInstance()

LLM::LLM()
    : QObject{nullptr}
+   , m_chatListModel(new ChatListModel(this))
{
-   if (m_chats.isEmpty())
-       addChat();
    connect(Download::globalInstance(), &Download::modelListChanged,
        this, &LLM::modelListChanged, Qt::QueuedConnection);
+   connect(m_chatListModel, &ChatListModel::connectChat,
+       this, &LLM::connectChat, Qt::QueuedConnection);
+   connect(m_chatListModel, &ChatListModel::disconnectChat,
+       this, &LLM::disconnectChat, Qt::QueuedConnection);
}

QList<QString> LLM::modelList() const
{
-   Q_ASSERT(currentChat());
+   Q_ASSERT(m_chatListModel->currentChat());
+   const Chat *currentChat = m_chatListModel->currentChat();
    // Build a model list from exepath and from the localpath
    QList<QString> list;

@@ -46,7 +48,7 @@ QList<QString> LLM::modelList() const
        QFileInfo info(filePath);
        QString name = info.completeBaseName().remove(0, 5);
        if (info.exists()) {
-           if (name == currentChat()->modelName())
+           if (name == currentChat->modelName())
                list.prepend(name);
            else
                list.append(name);
@@ -63,7 +65,7 @@ QList<QString> LLM::modelList() const
        QFileInfo info(filePath);
        QString name = info.completeBaseName().remove(0, 5);
        if (info.exists() && !list.contains(name)) { // don't allow duplicates
-           if (name == currentChat()->modelName())
+           if (name == currentChat->modelName())
                list.prepend(name);
            else
                list.append(name);
@@ -109,75 +111,8 @@ bool LLM::checkForUpdates() const

bool LLM::isRecalc() const
{
-   Q_ASSERT(currentChat());
-   return currentChat()->isRecalc();
-}
-
-Chat *LLM::currentChat() const
-{
-   return chatFromId(m_currentChat);
-}
-
-QList<QString> LLM::chatList() const
-{
-   return m_chats.keys();
-}
-
-QString LLM::addChat()
-{
-   Chat *newChat = new Chat(this);
-   m_chats.insert(newChat->id(), newChat);
-   emit chatListChanged();
-   setCurrentChatFromId(newChat->id());
-   return newChat->id();
-}
-
-void LLM::removeChat(const QString &id)
-{
-   if (!m_chats.contains(id)) {
-       qDebug() << "WARNING: Removing chat with id" << id;
-       return;
-   }
-
-   const bool chatIsCurrent = id == m_currentChat;
-   Chat *chat = m_chats.value(id);
-   disconnectChat(chat);
-   m_chats.remove(id);
-   emit chatListChanged();
-   delete chat;
-   if (m_chats.isEmpty())
-       addChat();
-   else
-       setCurrentChatFromId(chatList().first());
-}
-
-Chat *LLM::chatFromId(const QString &id) const
-{
-   if (!m_chats.contains(id)) {
-       qDebug() << "WARNING: Getting chat from id" << id;
-       return nullptr;
-   }
-   return m_chats.value(id);
-}
-
-void LLM::setCurrentChatFromId(const QString &id)
-{
-   if (!m_chats.contains(id)) {
-       qDebug() << "ERROR: Setting current chat from id" << id;
-       return;
-   }
-
-   // On load this can be empty as we add a new chat in ctor this method will be called
-   if (!m_currentChat.isEmpty()) {
-       Chat *curr = currentChat();
-       Q_ASSERT(curr);
-       disconnect(curr);
-   }
-
-   Chat *newCurr = m_chats.value(id);
-   connectChat(newCurr);
-   m_currentChat = id;
-   emit currentChatChanged();
+   Q_ASSERT(m_chatListModel->currentChat());
+   return m_chatListModel->currentChat()->isRecalc();
}

void LLM::connectChat(Chat *chat)
llm.h (20 changed lines)
@@ -4,44 +4,36 @@
#include <QObject>

-#include "chat.h"
+#include "chatlistmodel.h"

class LLM : public QObject
{
    Q_OBJECT
    Q_PROPERTY(QList<QString> modelList READ modelList NOTIFY modelListChanged)
    Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
-   Q_PROPERTY(Chat *currentChat READ currentChat NOTIFY currentChatChanged)
-   Q_PROPERTY(QList<QString> chatList READ chatList NOTIFY chatListChanged)
+   Q_PROPERTY(ChatListModel *chatListModel READ chatListModel NOTIFY chatListModelChanged)

public:

    static LLM *globalInstance();

    QList<QString> modelList() const;
    bool isRecalc() const;
-   Chat *currentChat() const;
-   QList<QString> chatList() const;
+   ChatListModel *chatListModel() const { return m_chatListModel; }

-   Q_INVOKABLE QString addChat();
-   Q_INVOKABLE void removeChat(const QString &id);
-   Q_INVOKABLE Chat *chatFromId(const QString &id) const;
-   Q_INVOKABLE void setCurrentChatFromId(const QString &id);
    Q_INVOKABLE bool checkForUpdates() const;

Q_SIGNALS:
    void modelListChanged();
-   void currentChatChanged();
    void recalcChanged();
-   void chatListChanged();
    void responseChanged();
+   void chatListModelChanged();

-private:
+private Q_SLOTS:
    void connectChat(Chat *chat);
    void disconnectChat(Chat *chat);

private:
-   QString m_currentChat;
-   QMap<QString, Chat*> m_chats;
+   ChatListModel *m_chatListModel;

private:
    explicit LLM();
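main.qml below refers to LLM.chatListModel, so the LLM singleton must already be reachable from QML; the registration itself is not part of this diff. Purely as an illustrative assumption (the project may register it differently), one common approach is a root context property set at startup:

// Hypothetical startup excerpt, not part of this commit.
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQmlContext>
#include <QUrl>
#include "llm.h"

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);
    QQmlApplicationEngine engine;

    // Q_PROPERTYs such as chatListModel then resolve as LLM.chatListModel in QML.
    engine.rootContext()->setContextProperty("LLM", LLM::globalInstance());

    engine.load(QUrl(QStringLiteral("qrc:/main.qml"))); // path is illustrative
    return app.exec();
}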
main.qml (63 changed lines)
@@ -18,7 +18,8 @@ Window {
        id: theme
    }

-   property var chatModel: LLM.currentChat.chatModel
+   property var currentChat: LLM.chatListModel.currentChat
+   property var chatModel: currentChat.chatModel

    color: theme.textColor
@@ -94,13 +95,13 @@ Window {
        Item {
            anchors.centerIn: parent
            height: childrenRect.height
-           visible: LLM.currentChat.isModelLoaded
+           visible: currentChat.isModelLoaded

            Label {
                id: modelLabel
                color: theme.textColor
                padding: 20
-               font.pixelSize: 24
+               font.pixelSize: theme.fontSizeLarger
                text: ""
                background: Rectangle {
                    color: theme.backgroundDarkest
@@ -169,17 +170,17 @@ Window {
                }

                onActivated: {
-                   LLM.currentChat.stopGenerating()
-                   LLM.currentChat.modelName = comboBox.currentText
-                   LLM.currentChat.reset();
+                   currentChat.stopGenerating()
+                   currentChat.modelName = comboBox.currentText
+                   currentChat.reset();
                }
            }
        }

        BusyIndicator {
            anchors.centerIn: parent
-           visible: !LLM.currentChat.isModelLoaded
-           running: !LLM.currentChat.isModelLoaded
+           visible: !currentChat.isModelLoaded
+           running: !currentChat.isModelLoaded
            Accessible.role: Accessible.Animation
            Accessible.name: qsTr("Busy indicator")
            Accessible.description: qsTr("Displayed when the model is loading")
@@ -409,7 +410,7 @@ Window {
            var string = item.name;
            var isResponse = item.name === qsTr("Response: ")
            if (item.currentResponse)
-               string += LLM.currentChat.response
+               string += currentChat.response
            else
                string += chatModel.get(i).value
            if (isResponse && item.stopped)
@@ -427,7 +428,7 @@ Window {
            var isResponse = item.name === qsTr("Response: ")
            str += "{\"content\": ";
            if (item.currentResponse)
-               str += JSON.stringify(LLM.currentChat.response)
+               str += JSON.stringify(currentChat.response)
            else
                str += JSON.stringify(item.value)
            str += ", \"role\": \"" + (isResponse ? "assistant" : "user") + "\"";
@@ -471,7 +472,7 @@ Window {
            }

            onClicked: {
-               LLM.currentChat.reset();
+               currentChat.reset();
            }
        }
@@ -555,14 +556,14 @@ Window {
            Accessible.description: qsTr("This is the list of prompt/response pairs comprising the actual conversation with the model")

            delegate: TextArea {
-               text: currentResponse ? LLM.currentChat.response : (value ? value : "")
+               text: currentResponse ? currentChat.response : (value ? value : "")
                width: listView.width
                color: theme.textColor
                wrapMode: Text.WordWrap
                focus: false
                readOnly: true
                font.pixelSize: theme.fontSizeLarge
-               cursorVisible: currentResponse ? (LLM.currentChat.response !== "" ? LLM.currentChat.responseInProgress : false) : false
+               cursorVisible: currentResponse ? (currentChat.response !== "" ? currentChat.responseInProgress : false) : false
                cursorPosition: text.length
                background: Rectangle {
                    color: name === qsTr("Response: ") ? theme.backgroundLighter : theme.backgroundLight
@@ -582,8 +583,8 @@ Window {
                anchors.leftMargin: 90
                anchors.top: parent.top
                anchors.topMargin: 5
-               visible: (currentResponse ? true : false) && LLM.currentChat.response === "" && LLM.currentChat.responseInProgress
-               running: (currentResponse ? true : false) && LLM.currentChat.response === "" && LLM.currentChat.responseInProgress
+               visible: (currentResponse ? true : false) && currentChat.response === "" && currentChat.responseInProgress
+               running: (currentResponse ? true : false) && currentChat.response === "" && currentChat.responseInProgress

                Accessible.role: Accessible.Animation
                Accessible.name: qsTr("Busy indicator")
@@ -614,7 +615,7 @@ Window {
                                   window.height / 2 - height / 2)
                x: globalPoint.x
                y: globalPoint.y
-               property string text: currentResponse ? LLM.currentChat.response : (value ? value : "")
+               property string text: currentResponse ? currentChat.response : (value ? value : "")
                response: newResponse === undefined || newResponse === "" ? text : newResponse
                onAccepted: {
                    var responseHasChanged = response !== text && response !== newResponse
@@ -624,13 +625,13 @@ Window {
                    chatModel.updateNewResponse(index, response)
                    chatModel.updateThumbsUpState(index, false)
                    chatModel.updateThumbsDownState(index, true)
-                   Network.sendConversation(LLM.currentChat.id, getConversationJson());
+                   Network.sendConversation(currentChat.id, getConversationJson());
                }
            }

            Column {
                visible: name === qsTr("Response: ") &&
-                   (!currentResponse || !LLM.currentChat.responseInProgress) && Network.isActive
+                   (!currentResponse || !currentChat.responseInProgress) && Network.isActive
                anchors.right: parent.right
                anchors.rightMargin: 20
                anchors.top: parent.top
@@ -656,7 +657,7 @@ Window {
                    chatModel.updateNewResponse(index, "")
                    chatModel.updateThumbsUpState(index, true)
                    chatModel.updateThumbsDownState(index, false)
-                   Network.sendConversation(LLM.currentChat.id, getConversationJson());
+                   Network.sendConversation(currentChat.id, getConversationJson());
                }
            }
@@ -729,27 +730,27 @@ Window {
                anchors.verticalCenter: parent.verticalCenter
                anchors.left: parent.left
                anchors.leftMargin: 15
-               source: LLM.currentChat.responseInProgress ? "qrc:/gpt4all/icons/stop_generating.svg" : "qrc:/gpt4all/icons/regenerate.svg"
+               source: currentChat.responseInProgress ? "qrc:/gpt4all/icons/stop_generating.svg" : "qrc:/gpt4all/icons/regenerate.svg"
            }
            leftPadding: 50
            onClicked: {
                var index = Math.max(0, chatModel.count - 1);
                var listElement = chatModel.get(index);

-               if (LLM.currentChat.responseInProgress) {
+               if (currentChat.responseInProgress) {
                    listElement.stopped = true
-                   LLM.currentChat.stopGenerating()
+                   currentChat.stopGenerating()
                } else {
-                   LLM.currentChat.regenerateResponse()
+                   currentChat.regenerateResponse()
                    if (chatModel.count) {
                        if (listElement.name === qsTr("Response: ")) {
                            chatModel.updateCurrentResponse(index, true);
                            chatModel.updateStopped(index, false);
-                           chatModel.updateValue(index, LLM.currentChat.response);
+                           chatModel.updateValue(index, currentChat.response);
                            chatModel.updateThumbsUpState(index, false);
                            chatModel.updateThumbsDownState(index, false);
                            chatModel.updateNewResponse(index, "");
-                           LLM.currentChat.prompt(listElement.prompt, settingsDialog.promptTemplate,
+                           currentChat.prompt(listElement.prompt, settingsDialog.promptTemplate,
                                settingsDialog.maxLength,
                                settingsDialog.topK, settingsDialog.topP,
                                settingsDialog.temperature,
@@ -765,7 +766,7 @@ Window {
            anchors.bottomMargin: 40
            padding: 15
            contentItem: Text {
-               text: LLM.currentChat.responseInProgress ? qsTr("Stop generating") : qsTr("Regenerate response")
+               text: currentChat.responseInProgress ? qsTr("Stop generating") : qsTr("Regenerate response")
                color: theme.textColor
                Accessible.role: Accessible.Button
                Accessible.name: text
@@ -793,7 +794,7 @@ Window {
            color: theme.textColor
            padding: 20
            rightPadding: 40
-           enabled: LLM.currentChat.isModelLoaded
+           enabled: currentChat.isModelLoaded
            wrapMode: Text.WordWrap
            font.pixelSize: theme.fontSizeLarge
            placeholderText: qsTr("Send a message...")
@@ -817,16 +818,16 @@ Window {
                if (textInput.text === "")
                    return

-               LLM.currentChat.stopGenerating()
+               currentChat.stopGenerating()

                if (chatModel.count) {
                    var index = Math.max(0, chatModel.count - 1);
                    var listElement = chatModel.get(index);
                    chatModel.updateCurrentResponse(index, false);
-                   chatModel.updateValue(index, LLM.currentChat.response);
+                   chatModel.updateValue(index, currentChat.response);
                }
-               LLM.currentChat.newPromptResponsePair(textInput.text);
-               LLM.currentChat.prompt(textInput.text, settingsDialog.promptTemplate,
+               currentChat.newPromptResponsePair(textInput.text);
+               currentChat.prompt(textInput.text, settingsDialog.promptTemplate,
                    settingsDialog.maxLength,
                    settingsDialog.topK,
                    settingsDialog.topP,
@@ -84,9 +84,10 @@ bool Network::packageAndSendJson(const QString &ingestId, const QString &json)
    }

    Q_ASSERT(doc.isObject());
+   Q_ASSERT(LLM::globalInstance()->chatListModel()->currentChat());
    QJsonObject object = doc.object();
    object.insert("source", "gpt4all-chat");
-   object.insert("agent_id", LLM::globalInstance()->currentChat()->modelName());
+   object.insert("agent_id", LLM::globalInstance()->chatListModel()->currentChat()->modelName());
    object.insert("submitter_id", m_uniqueId);
    object.insert("ingest_id", ingestId);
@@ -220,6 +221,7 @@ void Network::sendMixpanelEvent(const QString &ev)
    if (!m_usageStatsActive)
        return;

+   Q_ASSERT(LLM::globalInstance()->chatListModel()->currentChat());
    QJsonObject properties;
    properties.insert("token", "ce362e568ddaee16ed243eaffb5860a2");
    properties.insert("time", QDateTime::currentSecsSinceEpoch());
@@ -230,7 +232,7 @@ void Network::sendMixpanelEvent(const QString &ev)
    properties.insert("ip", m_ipify);
    properties.insert("name", QCoreApplication::applicationName() + " v"
        + QCoreApplication::applicationVersion());
-   properties.insert("model", LLM::globalInstance()->currentChat()->modelName());
+   properties.insert("model", LLM::globalInstance()->chatListModel()->currentChat()->modelName());

    QJsonObject event;
    event.insert("event", ev);
@@ -25,27 +25,68 @@ Drawer {

    Item {
        anchors.fill: parent
-       anchors.margins: 30
+       anchors.margins: 10

        Accessible.role: Accessible.Pane
        Accessible.name: qsTr("Drawer on the left of the application")
        Accessible.description: qsTr("Drawer that is revealed by pressing the hamburger button")

-       Label {
+       Button {
+           id: newChat
+           anchors.left: parent.left
+           anchors.right: parent.right
+           padding: 15
+           font.pixelSize: theme.fontSizeLarger
+           background: Rectangle {
+               color: theme.backgroundDarkest
+               opacity: .5
+               border.color: theme.backgroundLightest
+               border.width: 1
+               radius: 10
+           }
+           contentItem: Text {
+               text: qsTr("New chat")
+               horizontalAlignment: Text.AlignHCenter
+               color: theme.textColor
+
+               Accessible.role: Accessible.Button
+               Accessible.name: text
+               Accessible.description: qsTr("Use this to launch an external application that will check for updates to the installer")
+           }
+           onClicked: {
+               LLM.chatListModel.addChat();
+           }
+       }
+
+       ListView {
            id: conversationList
            anchors.left: parent.left
            anchors.right: parent.right
-           anchors.top: parent.top
-           wrapMode: Text.WordWrap
-           text: qsTr("Chat lists of specific conversations coming soon! Check back often for new features :)")
-           color: theme.textColor
+           anchors.topMargin: 10
+           anchors.top: newChat.bottom
+           anchors.bottom: checkForUpdatesButton.top
+           model: LLM.chatListModel

-           Accessible.role: Accessible.Paragraph
-           Accessible.name: qsTr("Coming soon")
-           Accessible.description: text
+           delegate: Label {
+               id: chatLabel
+               anchors.left: parent.left
+               anchors.right: parent.right
+               color: theme.textColor
+               padding: 15
+               font.pixelSize: theme.fontSizeLarger
+               text: name
+               background: Rectangle {
+                   color: index % 2 === 0 ? theme.backgroundLight : theme.backgroundLighter
+               }
+               horizontalAlignment: TextInput.AlignLeft
+           }
+
+           Accessible.role: Accessible.List
+           Accessible.name: qsTr("List of chats")
+           Accessible.description: qsTr("List of chats in the drawer dialog")
        }

-       Label {
+       /*Label {
            id: discordLink
            textFormat: Text.RichText
            anchors.left: parent.left
@@ -78,13 +119,14 @@ Drawer {
            Accessible.role: Accessible.Paragraph
            Accessible.name: qsTr("Thank you blurb")
            Accessible.description: qsTr("Contains embedded link to https://home.nomic.ai")
-       }
+       }*/

        Button {
            id: checkForUpdatesButton
            anchors.left: parent.left
            anchors.right: parent.right
            anchors.bottom: downloadButton.top
-           anchors.bottomMargin: 20
+           anchors.bottomMargin: 10
            padding: 15
            contentItem: Text {
                text: qsTr("Check for updates...")