author    Georgi Gerganov <ggerganov@gmail.com>    2023-12-06 20:21:59 +0200
committer GitHub <noreply@github.com>              2023-12-06 20:21:59 +0200
commit    05cd6e5036d72d0930de4d8f6be7bce09e8dda24 (patch)
tree      17fcd3e0ed225f347b6658b682395c832b287d16 /examples
parent    caa9249217c5fd524b900add5ddcbeaa20cbcb12 (diff)
server : recognize cache_prompt parameter in OAI API (#4347)
Diffstat (limited to 'examples')
-rw-r--r--  examples/server/server.cpp | 1 +
1 file changed, 1 insertion(+), 0 deletions(-)
diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 911f7bbe..369f81a8 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -2387,6 +2387,7 @@ json oaicompat_completion_params_parse(
 // Map OpenAI parameters to llama.cpp parameters
 llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt'
+llama_params["cache_prompt"] = json_value(body, "cache_prompt", false);
 llama_params["temperature"] = json_value(body, "temperature", 0.8);
 llama_params["top_k"] = json_value(body, "top_k", 40);
 llama_params["top_p"] = json_value(body, "top_p", 0.95);