diff options
author | Xuan Son Nguyen <thichthat@gmail.com> | 2024-03-15 09:44:57 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-03-15 10:44:57 +0200 |
commit | aab606a11fc0a9740a7f297521c3eef851dfb351 (patch) | |
tree | 58f6b901343c6ddbad604019097b3151e39e774f /llama.cpp | |
parent | b0bc9f4a9da7c19f4779106ea83b23feca747566 (diff) |
llama : add Orion chat template (#6066)
Diffstat (limited to 'llama.cpp')
-rw-r--r-- | llama.cpp | 20 |
1 file changed, 20 insertions, 0 deletions
@@ -14242,6 +14242,26 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<start_of_turn>model\n"; } + } else if (tmpl == "orion" || tmpl.find("'\\n\\nAssistant: ' + eos_token") != std::string::npos) { + // OrionStarAI/Orion-14B-Chat + std::string system_prompt = ""; + for (auto message : chat) { + std::string role(message->role); + if (role == "system") { + // there is no system message support, we will merge it with user prompt + system_prompt = message->content; + continue; + } else if (role == "user") { + ss << "Human: "; + if (!system_prompt.empty()) { + ss << system_prompt << "\n\n"; + system_prompt = ""; + } + ss << message->content << "\n\nAssistant: </s>"; + } else { + ss << message->content << "</s>"; + } + } } else { // template not supported return -1; |