mirror of
https://github.com/nomic-ai/gpt4all.git
synced 2024-10-01 01:06:10 -04:00
Change the suggestion mode to turn on for tool calls by default.
Signed-off-by: Adam Treat <treat.adam@gmail.com>
This commit is contained in:
parent
dda59a97a6
commit
d2ee235388
@@ -770,14 +770,14 @@ bool ChatLLM::prompt(const QList<QString> &collectionList, const QString &prompt
 bool ChatLLM::promptInternal(const QList<QString> &collectionList, const QString &prompt, const QString &promptTemplate,
     int32_t n_predict, int32_t top_k, float top_p, float min_p, float temp, int32_t n_batch, float repeat_penalty,
-    int32_t repeat_penalty_tokens)
+    int32_t repeat_penalty_tokens, bool isToolCallResponse)
 {
     if (!isModelLoaded())
         return false;

     QList<SourceExcerpt> databaseResults;
     const int retrievalSize = MySettings::globalInstance()->localDocsRetrievalSize();
-    if (!collectionList.isEmpty()) {
+    if (!collectionList.isEmpty() && !isToolCallResponse) {
         emit requestRetrieveFromDB(collectionList, prompt, retrievalSize, &databaseResults); // blocks
         emit sourceExcerptsChanged(databaseResults);
     }
@@ -847,7 +847,7 @@ bool ChatLLM::promptInternal(const QList<QString> &collectionList, const QString
     }

     SuggestionMode mode = MySettings::globalInstance()->suggestionMode();
-    if (mode == SuggestionMode::On || (!databaseResults.isEmpty() && mode == SuggestionMode::LocalDocsOnly))
+    if (mode == SuggestionMode::On || (mode == SuggestionMode::SourceExcerptsOnly && (!databaseResults.isEmpty() || isToolCallResponse)))
         generateQuestions(elapsed);
     else
         emit responseStopped(elapsed);
@@ -876,7 +876,7 @@ bool ChatLLM::toolCallInternal(const QString &toolCall, int32_t n_predict, int32
     } else {
         qWarning() << "WARNING: Could not find the tool for " << toolCall;
         return promptInternal(QList<QString>()/*collectionList*/, QString() /*prompt*/, promptTemplate,
-            n_predict, top_k, top_p, min_p, temp, n_batch, repeat_penalty, repeat_penalty_tokens);
+            n_predict, top_k, top_p, min_p, temp, n_batch, repeat_penalty, repeat_penalty_tokens, true /*isToolCallResponse*/);
     }

     const QString apiKey = MySettings::globalInstance()->braveSearchAPIKey();
@@ -888,7 +888,7 @@ bool ChatLLM::toolCallInternal(const QString &toolCall, int32_t n_predict, int32
     emit sourceExcerptsChanged(braveResponse.second);

     return promptInternal(QList<QString>()/*collectionList*/, braveResponse.first, promptTemplate,
-        n_predict, top_k, top_p, min_p, temp, n_batch, repeat_penalty, repeat_penalty_tokens);
+        n_predict, top_k, top_p, min_p, temp, n_batch, repeat_penalty, repeat_penalty_tokens, true /*isToolCallResponse*/);
 }

 void ChatLLM::setShouldBeLoaded(bool b)
@@ -199,7 +199,7 @@ Q_SIGNALS:
 protected:
     bool promptInternal(const QList<QString> &collectionList, const QString &prompt, const QString &promptTemplate,
         int32_t n_predict, int32_t top_k, float top_p, float min_p, float temp, int32_t n_batch, float repeat_penalty,
-        int32_t repeat_penalty_tokens);
+        int32_t repeat_penalty_tokens, bool isToolCallResponse = false);
     bool toolCallInternal(const QString &toolcall, int32_t n_predict, int32_t top_k, float top_p, float min_p, float temp, int32_t n_batch, float repeat_penalty,
         int32_t repeat_penalty_tokens);
     bool handlePrompt(int32_t token);
@@ -51,7 +51,7 @@ static const QVariantMap basicDefaults {
     { "saveChatsContext", false },
     { "serverChat", false },
     { "userDefaultModel", "Application default" },
-    { "suggestionMode", QVariant::fromValue(SuggestionMode::LocalDocsOnly) },
+    { "suggestionMode", QVariant::fromValue(SuggestionMode::SourceExcerptsOnly) },
     { "localdocs/chunkSize", 512 },
     { "localdocs/retrievalSize", 3 },
     { "localdocs/showReferences", true },
@@ -21,9 +21,9 @@ namespace MySettingsEnums {
  * ApplicationSettings.qml, as well as the corresponding name lists in mysettings.cpp */

 enum class SuggestionMode {
-    LocalDocsOnly = 0,
-    On = 1,
-    Off = 2,
+    SourceExcerptsOnly = 0,
+    On = 1,
+    Off = 2,
 };
 Q_ENUM_NS(SuggestionMode)
@@ -350,7 +350,7 @@ MySettingsTab {
     Layout.alignment: Qt.AlignRight
     // NOTE: indices match values of SuggestionMode enum, keep them in sync
     model: ListModel {
-        ListElement { name: qsTr("When chatting with LocalDocs") }
+        ListElement { name: qsTr("When source excerpts are cited") }
         ListElement { name: qsTr("Whenever possible") }
         ListElement { name: qsTr("Never") }
     }
Loading…
Reference in New Issue
Block a user