Get the backend as well as the client building/working with MSVC.

This commit is contained in:
Adam Treat 2023-05-25 15:22:45 -04:00
parent 63f57635d8
commit 474c5387f9
5 changed files with 15 additions and 9 deletions

View File

@@ -1,4 +1,5 @@
cmake_minimum_required(VERSION 3.16)
set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)
if(APPLE)
option(BUILD_UNIVERSAL "Build a Universal binary on macOS" ON)

View File

@@ -1,5 +1,8 @@
cmake_minimum_required(VERSION 3.16)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
if(APPLE)
option(BUILD_UNIVERSAL "Build a Universal binary on macOS" OFF)
if(BUILD_UNIVERSAL)

View File

@@ -370,7 +370,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
QString modelFilename = obj["filename"].toString();
QString modelFilesize = obj["filesize"].toString();
QString requires = obj["requires"].toString();
QString requiresVersion = obj["requires"].toString();
QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
bool bestGPTJ = obj.contains("bestGPTJ") && obj["bestGPTJ"] == QString("true");
@@ -378,9 +378,9 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
bool bestMPT = obj.contains("bestMPT") && obj["bestMPT"] == QString("true");
QString description = obj["description"].toString();
if (!requires.isEmpty()
&& requires != currentVersion
&& compareVersions(requires, currentVersion)) {
if (!requiresVersion.isEmpty()
&& requiresVersion != currentVersion
&& compareVersions(requiresVersion, currentVersion)) {
continue;
}
@@ -409,7 +409,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
modelInfo.bestLlama = bestLlama;
modelInfo.bestMPT = bestMPT;
modelInfo.description = description;
modelInfo.requires = requires;
modelInfo.requiresVersion = requiresVersion;
m_modelMap.insert(modelInfo.filename, modelInfo);
}
@@ -423,7 +423,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
modelInfo.isChatGPT = true;
modelInfo.filename = "chatgpt-gpt-3.5-turbo";
modelInfo.description = tr("OpenAI's ChatGPT model gpt-3.5-turbo. ") + chatGPTDesc;
modelInfo.requires = "2.4.2";
modelInfo.requiresVersion = "2.4.2";
QString filePath = downloadLocalModelsPath() + modelInfo.filename + ".txt";
QFileInfo info(filePath);
modelInfo.installed = info.exists();
@@ -435,7 +435,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
modelInfo.isChatGPT = true;
modelInfo.filename = "chatgpt-gpt-4";
modelInfo.description = tr("OpenAI's ChatGPT model gpt-4. ") + chatGPTDesc;
modelInfo.requires = "2.4.2";
modelInfo.requiresVersion = "2.4.2";
QString filePath = downloadLocalModelsPath() + modelInfo.filename + ".txt";
QFileInfo info(filePath);
modelInfo.installed = info.exists();

View File

@@ -22,7 +22,7 @@ struct ModelInfo {
Q_PROPERTY(bool bestMPT MEMBER bestMPT)
Q_PROPERTY(bool isChatGPT MEMBER isChatGPT)
Q_PROPERTY(QString description MEMBER description)
Q_PROPERTY(QString requires MEMBER requires)
Q_PROPERTY(QString requiresVersion MEMBER requiresVersion)
public:
QString filename;
@@ -36,7 +36,7 @@ public:
bool bestMPT = false;
bool isChatGPT = false;
QString description;
QString requires;
QString requiresVersion;
};
Q_DECLARE_METATYPE(ModelInfo)

View File

@@ -3,6 +3,7 @@
int main(int argc, char *argv[])
{
#if __GNUC__
static bool avx = __builtin_cpu_supports("avx");
static bool avx2 = __builtin_cpu_supports("avx2");
static bool fma = __builtin_cpu_supports("fma");
@@ -25,5 +26,6 @@ int main(int argc, char *argv[])
fflush(stdout);
fprintf(stderr, "\" version of gpt4all.\n");
fflush(stderr);
#endif
return 0;
}