diff options
author    : Georgi Gerganov <ggerganov@gmail.com> | 2023-12-06 20:21:59 +0200
committer : GitHub <noreply@github.com>           | 2023-12-06 20:21:59 +0200
commit    : 05cd6e5036d72d0930de4d8f6be7bce09e8dda24 (patch)
tree      : 17fcd3e0ed225f347b6658b682395c832b287d16 /examples
parent    : caa9249217c5fd524b900add5ddcbeaa20cbcb12 (diff)
server : recognize cache_prompt parameter in OAI API (#4347)
Diffstat (limited to 'examples')
-rw-r--r-- | examples/server/server.cpp | 1 |
1 file changed, 1 insertion(+), 0 deletions(-)
diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 911f7bbe..369f81a8 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -2387,6 +2387,7 @@ json oaicompat_completion_params_parse(
     // Map OpenAI parameters to llama.cpp parameters
     llama_params["prompt"]       = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt'
+    llama_params["cache_prompt"] = json_value(body, "cache_prompt", false);
     llama_params["temperature"]  = json_value(body, "temperature", 0.8);
     llama_params["top_k"]        = json_value(body, "top_k", 40);
     llama_params["top_p"]        = json_value(body, "top_p", 0.95);