#ifndef LLAMAMODEL_H
#define LLAMAMODEL_H

#include <string>
#include <functional>
#include <vector>
#include "llmodel.h"
|
|
|
|
|
|
|
|
class LLamaPrivate;
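
// Implementation of the LLModel interface backed by llama.cpp.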
class LLamaModel : public LLModel {
public:
    LLamaModel();
    ~LLamaModel();

    // Load a model from a file on disk or from an already-open stream;
    // returns false if loading fails.
    bool loadModel(const std::string &modelPath) override;
    bool loadModel(const std::string &modelPath, std::istream &fin) override;
    bool isModelLoaded() const override;

    // Run inference on `prompt`. promptCallback is called as prompt tokens are
    // processed, responseCallback for each generated token (id and text), and
    // recalculateCallback when the context has to be re-evaluated; a callback
    // returning false is expected to stop generation early.
    void prompt(const std::string &prompt,
                std::function<bool(int32_t)> promptCallback,
                std::function<bool(int32_t, const std::string&)> responseCallback,
                std::function<bool(bool)> recalculateCallback,
                PromptContext &ctx) override;

    // Set/query the number of CPU threads used for inference.
    void setThreadCount(int32_t n_threads) override;
    int32_t threadCount() override;

protected:
    // Re-evaluate the stored prompt context (e.g. after the token window is
    // exceeded), reporting progress through the recalculate callback.
    void recalculateContext(PromptContext &promptCtx,
                            std::function<bool(bool)> recalculate) override;

private:
    LLamaPrivate *d_ptr;  // Opaque implementation state (pimpl idiom).
};
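
// Usage sketch (illustrative only): it assumes PromptContext is the context
// struct declared in llmodel.h, nested in LLModel and default-constructible,
// and that the path points at a model file llama.cpp can load.
//
//     LLamaModel model;
//     if (!model.loadModel("/path/to/model.bin") || !model.isModelLoaded())
//         return;
//     model.setThreadCount(4);
//
//     LLModel::PromptContext ctx;
//     model.prompt("Write a haiku about autumn.",
//                  [](int32_t) { return true; },             // keep feeding prompt tokens
//                  [](int32_t, const std::string &piece) {   // stream generated text
//                      std::cout << piece;                   // needs <iostream>
//                      return true;                          // false would stop generation
//                  },
//                  [](bool) { return true; },                // allow context recalculation
//                  ctx);
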
#endif // LLAMAMODEL_H