2023-04-13 22:15:40 -04:00
|
|
|
#ifndef LLMODEL_H
|
|
|
|
#define LLMODEL_H
|
2023-06-01 07:57:10 -04:00
|
|
|
|
2023-04-13 22:15:40 -04:00
|
|
|
#include <string>
|
|
|
|
#include <functional>
|
|
|
|
#include <vector>
|
2023-05-31 15:37:25 -04:00
|
|
|
#include <string_view>
|
|
|
|
#include <fstream>
|
2023-05-04 20:01:32 -04:00
|
|
|
#include <cstdint>
|
2023-06-04 08:59:24 -04:00
|
|
|
#include <limits>
|
2023-04-13 22:15:40 -04:00
|
|
|
|
2023-06-01 10:51:46 -04:00
|
|
|
class Dlhandle;

/// Abstract interface for a loadable large-language-model backend.
/// Concrete backends (e.g. llama/gptj/mpt) live in dynamically loaded
/// libraries and are discovered/instantiated via the nested Implementation
/// descriptor and the static construct() factory.
class LLModel {
public:
    /// Integer id of a single vocabulary token.
    using Token = int32_t;

    /// Descriptor for one dynamically loaded backend library.
    /// Owns the Dlhandle of the loaded library; movable but not copyable.
    class Implementation {
        LLModel *(*construct_)(); // factory symbol resolved from the library

    public:
        Implementation(Dlhandle&&);
        Implementation(const Implementation&) = delete;
        Implementation(Implementation&&);
        ~Implementation();

        /// True if the given library exports the symbols required of a backend.
        static bool isImplementation(const Dlhandle&);

        std::string_view modelType, buildVariant;
        /// Checks whether a model file's magic header matches this backend.
        bool (*magicMatch)(std::ifstream& f);
        Dlhandle *dlhandle;

        // The only way an implementation should be constructed
        LLModel *construct() const {
            auto fres = construct_();
            // Back-pointer so the model can report which backend created it.
            fres->m_implementation = this;
            return fres;
        }
    };

    /// Mutable generation state threaded through prompt() and the sampling
    /// hooks below. Callers keep one instance per conversation.
    struct PromptContext {
        std::vector<float> logits;      // logits of current context
        std::vector<int32_t> tokens;    // current tokens in the context window
        int32_t n_past = 0;             // number of tokens in past conversation
        int32_t n_ctx = 0;              // number of tokens possible in context window
        int32_t n_predict = 200;        // maximum number of tokens to generate
        int32_t top_k = 40;             // top-k sampling cutoff
        float top_p = 0.9f;             // nucleus sampling probability threshold
        float temp = 0.9f;              // sampling temperature
        int32_t n_batch = 9;            // number of tokens evaluated per batch
        float repeat_penalty = 1.10f;   // penalty factor for repeated tokens
        int32_t repeat_last_n = 64;     // last n tokens to penalize
        float contextErase = 0.75f;     // percent of context to erase if we exceed
                                        // the context window
    };

    LLModel() = default;
    virtual ~LLModel() = default;

    /// Loads model weights from the given path; returns false on failure.
    virtual bool loadModel(const std::string &modelPath) = 0;
    virtual bool isModelLoaded() const = 0;
    /// Size in bytes of the serializable internal state (0 = unsupported).
    virtual size_t stateSize() const { return 0; }
    virtual size_t saveState(uint8_t */*dest*/) const { return 0; }
    virtual size_t restoreState(const uint8_t */*src*/) { return 0; }

    /// Runs the full prompt/response loop. The callbacks return false to
    /// cancel generation; ctx carries sampling parameters and accumulates
    /// the token history across calls.
    virtual void prompt(const std::string &prompt,
                        std::function<bool(int32_t)> promptCallback,
                        std::function<bool(int32_t, const std::string&)> responseCallback,
                        std::function<bool(bool)> recalculateCallback,
                        PromptContext &ctx);

    virtual void setThreadCount(int32_t /*n_threads*/) {}
    virtual int32_t threadCount() const { return 1; }

    /// The backend descriptor that constructed this model.
    const Implementation& implementation() const {
        return *m_implementation;
    }

    /// All backend implementations found on the search path.
    static const std::vector<Implementation>& implementationList();
    /// Picks the implementation whose magicMatch accepts the open model file,
    /// restricted to the given build variant; nullptr if none matches.
    static const Implementation *implementation(std::ifstream& f, const std::string& buildVariant);
    /// Loads the right backend for the model at modelPath and constructs a model.
    static LLModel *construct(const std::string &modelPath, std::string buildVariant = "auto");

    static void setImplementationsSearchPath(const std::string& path);
    static const std::string& implementationsSearchPath();

protected:
    // These are pure virtual because subclasses need to implement as the default implementation of
    // 'prompt' above calls these functions
    virtual std::vector<Token> tokenize(PromptContext &, const std::string&) const = 0;
    virtual std::string tokenToString(Token) const = 0;
    virtual Token sampleToken(PromptContext &ctx) const = 0;
    virtual bool evalTokens(PromptContext &/*ctx*/, const std::vector<int32_t>& /*tokens*/) const = 0;
    virtual int32_t contextLength() const = 0;
    virtual const std::vector<Token>& endTokens() const = 0;

    // This is a helper function called from the default implementation of 'prompt' but it can be
    // shared by all base classes so it isn't virtual
    void recalculateContext(PromptContext &promptCtx, std::function<bool(bool)> recalculate);

    const Implementation *m_implementation = nullptr;
};
|
2023-04-18 09:46:03 -04:00
|
|
|
#endif // LLMODEL_H
|