summary | refs | log | tree | commit | diff
path: root/llama.cpp
diff options
context:
space:
mode:
author: kaizau <kaizau@users.noreply.github.com> 2024-04-03 23:24:31 +0800
committer: GitHub <noreply@github.com> 2024-04-03 17:24:31 +0200
commit: 1ff4d9f3d683f02ef8a12e04bfac84300c44bd3a (patch)
tree: 009636b2cdb905fac501121c6234d0a8d0b09698 /llama.cpp
parent: 076b08649ecc3b0e1c0709c2a086a63eddd1bf32 (diff)
Add OpenChat, Alpaca, Vicuna chat templates (#6397)
* Add openchat chat template * Add chat template test for openchat * Add chat template for vicuna * Add chat template for orca-vicuna * Add EOS for vicuna templates * Combine vicuna chat templates * Add tests for openchat and vicuna chat templates * Add chat template for alpaca * Add separate template name for vicuna-orca * Remove alpaca, match deepseek with jinja output * Regenerate chat template test with add_generation_prompt * Separate deepseek bos from system message * Match openchat template with jinja output * Remove BOS token from templates, unprefix openchat
Diffstat (limited to 'llama.cpp')
-rw-r--r--  llama.cpp | 49
1 file changed, 49 insertions, 0 deletions
diff --git a/llama.cpp b/llama.cpp
index 2df03f99..08ec7332 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -15837,6 +15837,55 @@ static int32_t llama_chat_apply_template_internal(
ss << message->content << "</s>";
}
}
+ } else if (tmpl == "openchat" || tmpl.find("GPT4 Correct ") != std::string::npos) {
+ // openchat/openchat-3.5-0106,
+ for (auto message : chat) {
+ std::string role(message->role);
+ if (role == "system") {
+ ss << message->content << "<|end_of_turn|>";
+ } else {
+ role[0] = toupper(role[0]);
+ ss << "GPT4 Correct " << role << ": " << message->content << "<|end_of_turn|>";
+ }
+ }
+ if (add_ass) {
+ ss << "GPT4 Correct Assistant:";
+ }
+ } else if (tmpl == "vicuna" || tmpl == "vicuna-orca" || (tmpl.find("USER: ") != std::string::npos && tmpl.find("ASSISTANT: ") != std::string::npos)) {
+ // eachadea/vicuna-13b-1.1 (and Orca variant)
+ for (auto message : chat) {
+ std::string role(message->role);
+ if (role == "system") {
+ // Orca-Vicuna variant uses a system prefix
+ if (tmpl == "vicuna-orca" || tmpl.find("SYSTEM: ") != std::string::npos) {
+ ss << "SYSTEM: " << message->content << "\n";
+ } else {
+ ss << message->content << "\n\n";
+ }
+ } else if (role == "user") {
+ ss << "USER: " << message->content << "\n";
+ } else if (role == "assistant") {
+ ss << "ASSISTANT: " << message->content << "</s>\n";
+ }
+ }
+ if (add_ass) {
+ ss << "ASSISTANT:";
+ }
+ } else if (tmpl == "deepseek" || (tmpl.find("### Instruction:") != std::string::npos && tmpl.find("<|EOT|>") != std::string::npos)) {
+ // deepseek-ai/deepseek-coder-33b-instruct
+ for (auto message : chat) {
+ std::string role(message->role);
+ if (role == "system") {
+ ss << message->content;
+ } else if (role == "user") {
+ ss << "### Instruction:\n" << message->content << "\n";
+ } else if (role == "assistant") {
+ ss << "### Response:\n" << message->content << "\n<|EOT|>\n";
+ }
+ }
+ if (add_ass) {
+ ss << "### Response:\n";
+ }
} else {
// template not supported
return -1;