From abd3314064cd3c513f9eef34c3ba6c23a107442c Mon Sep 17 00:00:00 2001
From: Tristan Druyen
Date: Wed, 24 Apr 2024 10:52:37 +0200
Subject: llama : add phi 3 chat template (#6857)

* Add phi 3 chat template & tests

* test : fix chat template result

---------

Co-authored-by: Georgi Gerganov
---
 llama.cpp | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/llama.cpp b/llama.cpp
index 30fe1903..e4ca34bd 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -17447,6 +17447,15 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "<|start_header_id|>assistant<|end_header_id|>\n\n";
         }
+    } else if (tmpl == "phi3" || (tmpl.find("<|assistant|>") != std::string::npos && tmpl.find("<|end|>") != std::string::npos )) {
+        // Phi 3
+        for (auto message : chat) {
+            std::string role(message->role);
+            ss << "<|" << role << "|>\n" << trim(message->content) << "<|end|>\n";
+        }
+        if (add_ass) {
+            ss << "<|assistant|>\n";
+        }
     } else {
         // template not supported
         return -1;
-- 
cgit v1.2.3
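
A minimal sketch (not part of the patch) of how the new branch could be exercised through the public llama_chat_apply_template() API, passing "phi3" as an explicit template name; the message contents and buffer sizing here are illustrative assumptions, not taken from the commit.

    // sketch: format a Phi 3 prompt via the public chat-template API
    #include <cstdio>
    #include <vector>

    #include "llama.h"

    int main() {
        // roles/contents are made up for illustration
        std::vector<llama_chat_message> chat = {
            { "system", "You are a helpful assistant." },
            { "user",   "Hello!"                       },
        };

        std::vector<char> buf(1024);
        // passing an explicit template name, so the model pointer is not consulted here
        int32_t len = llama_chat_apply_template(nullptr, "phi3", chat.data(), chat.size(),
                                                /*add_ass=*/true, buf.data(), buf.size());
        if (len < 0) {
            fprintf(stderr, "template not supported\n");
            return 1;
        }
        if ((size_t) len > buf.size()) {
            // the call returns the required length, so retry with a larger buffer
            buf.resize(len);
            len = llama_chat_apply_template(nullptr, "phi3", chat.data(), chat.size(),
                                            /*add_ass=*/true, buf.data(), buf.size());
        }
        printf("%.*s", len, buf.data());
        return 0;
    }

Given the branch added above, the expected output would be each message rendered as "<|role|>\n{trimmed content}<|end|>\n", followed by "<|assistant|>\n" because add_ass is true:

    <|system|>
    You are a helpful assistant.<|end|>
    <|user|>
    Hello!<|end|>
    <|assistant|>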