Always force use_mlock to true, overriding the caller-supplied setting.

This commit is contained in:
Adam Treat 2023-05-20 21:04:11 -04:00
parent 914519e772
commit b678bc5f6f

View File

@ -53,7 +53,7 @@ bool LLamaModel::loadModel(const std::string &modelPath)
d_ptr->params.seed = params.seed; d_ptr->params.seed = params.seed;
d_ptr->params.f16_kv = params.memory_f16; d_ptr->params.f16_kv = params.memory_f16;
d_ptr->params.use_mmap = params.use_mmap; d_ptr->params.use_mmap = params.use_mmap;
d_ptr->params.use_mlock = params.use_mlock; d_ptr->params.use_mlock = true;
d_ptr->ctx = llama_init_from_file(modelPath.c_str(), d_ptr->params); d_ptr->ctx = llama_init_from_file(modelPath.c_str(), d_ptr->params);
if (!d_ptr->ctx) { if (!d_ptr->ctx) {