Add a new reverse prompt for the new localdocs context feature.

This commit is contained in:
Adam Treat 2023-05-20 20:02:14 -04:00 committed by AT
parent 54fc980cb5
commit 9bfff8bfcb
3 changed files with 3 additions and 3 deletions

View File

@@ -983,7 +983,7 @@ void GPTJ::prompt(const std::string &prompt,
     std::string cachedResponse;
     std::vector<gpt_vocab::id> cachedTokens;
     std::unordered_set<std::string> reversePrompts
-        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant" };
+        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant", "### Context" };
     // predict next tokens
     int32_t totalPredictions = 0;

View File

@@ -179,7 +179,7 @@ void LLamaModel::prompt(const std::string &prompt,
     std::string cachedResponse;
     std::vector<llama_token> cachedTokens;
     std::unordered_set<std::string> reversePrompts
-        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant" };
+        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant", "### Context" };
     // predict next tokens
     int32_t totalPredictions = 0;

View File

@@ -908,7 +908,7 @@ void MPT::prompt(const std::string &prompt,
     std::string cachedResponse;
     std::vector<int> cachedTokens;
    std::unordered_set<std::string> reversePrompts
-        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant" };
+        = { "### Instruction", "### Prompt", "### Response", "### Human", "### Assistant", "### Context" };
     // predict next tokens
     int32_t totalPredictions = 0;