Cleaned up implementation management (#787)

* Cleaned up implementation management

* Initialize LLModel::m_implementation to nullptr

* llmodel.h: Moved dlhandle fwd declare above LLModel class
niansa/tuxifan 2023-06-01 16:51:46 +02:00 committed by GitHub
parent ddb5fa718d
commit fc60f0c09c
2 changed files with 29 additions and 43 deletions
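The core of the change in the diffs below: llmodel.h now only forward-declares Dlhandle, Implementation holds the handle behind a raw pointer that its constructor allocates and a new destructor frees, and LLModel's back-pointer to its Implementation is default-initialized so it can no longer dangle. A minimal sketch of that pattern, assuming nothing beyond what the diff shows (class nesting and the other members are omitted, and the Dlhandle definition is elided because only the .cpp side needs it):

// Header side: a pointer member only needs an incomplete type.
class Dlhandle;  // full definition stays in dlhandle.h, now included only by llmodel.cpp

class Implementation {
public:
    Implementation(Dlhandle &&h);
    ~Implementation();
    Dlhandle *dlhandle;  // heap-owned handle behind the forward declaration
};

class LLModel {
protected:
    // Default member initializer: never dangles before construct() assigns it.
    const Implementation *m_implementation = nullptr;
};

// Source side (needs the complete Dlhandle type from dlhandle.h):
// Implementation::Implementation(Dlhandle &&h) : dlhandle(new Dlhandle(std::move(h))) {}
// Implementation::~Implementation() { delete dlhandle; }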

llmodel.cpp

@@ -1,4 +1,5 @@
 #include "llmodel.h"
+#include "dlhandle.h"
 
 #include <string>
 #include <vector>
@@ -20,24 +21,28 @@ static bool requires_avxonly() {
 #endif
 }
 
-LLModel::Implementation::Implementation(Dlhandle &&dlhandle_) : dlhandle(std::move(dlhandle_)) {
-    auto get_model_type = dlhandle.get<const char *()>("get_model_type");
+LLModel::Implementation::Implementation(Dlhandle &&dlhandle_) : dlhandle(new Dlhandle(std::move(dlhandle_))) {
+    auto get_model_type = dlhandle->get<const char *()>("get_model_type");
     assert(get_model_type);
     modelType = get_model_type();
-    auto get_build_variant = dlhandle.get<const char *()>("get_build_variant");
+    auto get_build_variant = dlhandle->get<const char *()>("get_build_variant");
     assert(get_build_variant);
     buildVariant = get_build_variant();
-    magicMatch = dlhandle.get<bool(std::ifstream&)>("magic_match");
+    magicMatch = dlhandle->get<bool(std::ifstream&)>("magic_match");
     assert(magicMatch);
-    construct_ = dlhandle.get<LLModel *()>("construct");
+    construct_ = dlhandle->get<LLModel *()>("construct");
     assert(construct_);
 }
 
+LLModel::Implementation::~Implementation() {
+    delete dlhandle;
+}
+
 bool LLModel::Implementation::isImplementation(const Dlhandle &dl) {
     return dl.get<bool(uint32_t)>("is_g4a_backend_model_implementation");
 }
 
-const std::vector<LLModel::Implementation> &LLModel::getImplementationList() {
+const std::vector<LLModel::Implementation> &LLModel::implementationList() {
     // NOTE: allocated on heap so we leak intentionally on exit so we have a chance to clean up the
     // individual models without the cleanup of the static list interfering
     static auto* libs = new std::vector<LLModel::Implementation>([] () {
@@ -46,12 +51,7 @@ const std::vector<LLModel::Implementation> &LLModel::getImplementationList() {
         auto search_in_directory = [&](const std::filesystem::path& path) {
             // Iterate over all libraries
             for (const auto& f : std::filesystem::directory_iterator(path)) {
-                // Get path
-                // FIXME: Remove useless comment and avoid usage of 'auto' where having the type is
-                // helpful for code readability so someone doesn't have to look up the docs for what
-                // type is returned by 'path' as it is not std::string
-                const auto& p = f.path();
-                // Check extension
+                const std::filesystem::path& p = f.path();
                 if (p.extension() != LIB_FILE_EXT) continue;
                 // Add to list if model implementation
                 try {
@@ -74,29 +74,18 @@ const std::vector<LLModel::Implementation> &LLModel::getImplementationList() {
     return *libs;
 }
 
-const LLModel::Implementation* LLModel::getImplementation(std::ifstream& f, const std::string& buildVariant) {
-    // FIXME: Please remove all these useless comments as the code itself is more than enough in these
-    // instances to tell what is going on
-    // Iterate over all libraries
-    for (const auto& i : getImplementationList()) {
+const LLModel::Implementation* LLModel::implementation(std::ifstream& f, const std::string& buildVariant) {
+    for (const auto& i : implementationList()) {
         f.seekg(0);
-        // Check that magic matches
-        if (!i.magicMatch(f)) {
-            continue;
-        }
-        // Check that build variant is correct
-        if (buildVariant != i.buildVariant) {
-            continue;
-        }
-        // Looks like we're good to go, return this dlhandle
+        if (!i.magicMatch(f)) continue;
+        if (buildVariant != i.buildVariant) continue;
         return &i;
     }
-    // Nothing found, so return nothing
     return nullptr;
 }
 
 LLModel *LLModel::construct(const std::string &modelPath, std::string buildVariant) {
-    //TODO: Auto-detect
+    //TODO: Auto-detect CUDA/OpenCL
     if (buildVariant == "auto") {
         if (requires_avxonly()) {
             buildVariant = "avxonly";
@@ -108,7 +97,7 @@ LLModel *LLModel::construct(const std::string &modelPath, std::string buildVariant) {
     std::ifstream f(modelPath, std::ios::binary);
     if (!f) return nullptr;
     // Get correct implementation
-    auto impl = getImplementation(f, buildVariant);
+    auto impl = implementation(f, buildVariant);
     if (!impl) return nullptr;
     f.close();
     // Construct and return llmodel implementation
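The Implementation constructor above resolves get_model_type, get_build_variant, magic_match and construct through dlhandle->get<Signature>(name). dlhandle.h itself is not part of this diff, so the following is only a stand-in sketch of that kind of typed symbol lookup, assuming a POSIX dlopen/dlsym backend rather than the project's real wrapper:

#include <dlfcn.h>
#include <stdexcept>
#include <string>

class Dlhandle {
    void *handle = nullptr;
public:
    explicit Dlhandle(const std::string &path)
        : handle(dlopen(path.c_str(), RTLD_LAZY | RTLD_LOCAL)) {
        if (!handle) throw std::runtime_error("failed to load " + path);
    }
    Dlhandle(const Dlhandle &) = delete;
    Dlhandle(Dlhandle &&o) noexcept : handle(o.handle) { o.handle = nullptr; }
    ~Dlhandle() { if (handle) dlclose(handle); }

    // Resolve a symbol and hand it back with the signature the caller expects,
    // e.g. get<const char *()>("get_model_type") yields a const char *(*)().
    template <typename Signature>
    Signature *get(const char *name) const {
        return reinterpret_cast<Signature *>(dlsym(handle, name));
    }
};

Keeping a wrapper like this behind a pointer and a forward declaration, as the commit does, is what lets llmodel.h stay free of dlfcn.h and other platform details.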

llmodel.h

@@ -1,8 +1,6 @@
 #ifndef LLMODEL_H
 #define LLMODEL_H
 
-#include "dlhandle.h" // FIXME: would be nice to move this into implementation file
-
 #include <string>
 #include <functional>
 #include <vector>
@@ -10,24 +8,27 @@
 #include <fstream>
 #include <cstdint>
 
+class Dlhandle;
+
 class LLModel {
 public:
     class Implementation {
         LLModel *(*construct_)();
 
     public:
-        // FIXME: Move the whole implementation details to cpp file
         Implementation(Dlhandle&&);
+        ~Implementation();
+
         static bool isImplementation(const Dlhandle&);
 
         std::string_view modelType, buildVariant;
         bool (*magicMatch)(std::ifstream& f);
-        Dlhandle dlhandle;
+        Dlhandle *dlhandle;
 
         // The only way an implementation should be constructed
         LLModel *construct() const {
             auto fres = construct_();
-            fres->implementation = this;
+            fres->m_implementation = this;
             return fres;
         }
     };
@@ -64,20 +65,16 @@ public:
     virtual void setThreadCount(int32_t /*n_threads*/) {}
     virtual int32_t threadCount() const { return 1; }
 
-    // FIXME: This is unused??
-    const Implementation& getImplementation() const {
-        return *implementation;
+    const Implementation& implementation() const {
+        return *m_implementation;
     }
 
-    // FIXME: Maybe have an 'ImplementationInfo' class for the GUI here, but the DLHandle stuff should
-    // be hidden in cpp file
-    // FIXME: Avoid usage of 'get' for getters
-    static const std::vector<Implementation>& getImplementationList();
-    static const Implementation *getImplementation(std::ifstream& f, const std::string& buildVariant);
+    static const std::vector<Implementation>& implementationList();
+    static const Implementation *implementation(std::ifstream& f, const std::string& buildVariant);
     static LLModel *construct(const std::string &modelPath, std::string buildVariant = "default");
 
 protected:
-    const Implementation *implementation; // FIXME: This is dangling! You don't initialize it in ctor either
+    const Implementation *m_implementation = nullptr;
 
     virtual void recalculateContext(PromptContext &promptCtx,
             std::function<bool(bool)> recalculate) = 0;
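With the renames in place, caller-side code would look roughly like this. It is a usage sketch based only on the declarations above; the model path, the error handling, and the assumption that the caller owns the returned pointer are illustrative:

#include <iostream>
#include "llmodel.h"

int main() {
    // "auto" is resolved via requires_avxonly() before the magic_match /
    // build-variant scan in LLModel::implementation() picks a backend.
    LLModel *model = LLModel::construct("ggml-model.bin", "auto");
    if (!model) {
        std::cerr << "no matching backend implementation found\n";
        return 1;
    }
    // construct() set m_implementation, so the accessor is safe to use here.
    std::cout << "backend model type: " << model->implementation().modelType << '\n';
    delete model;  // assumes the caller owns the constructed model
    return 0;
}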