diff options
author | Benjamin Findley <39356821+Kartoffelsaft@users.noreply.github.com> | 2024-05-12 19:40:08 -0700 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-05-13 12:40:08 +1000 |
commit | e586ee42595500c53938e937b6b6ad5353ad76dc (patch) | |
tree | ed4929e7eba0cc91b2902fc0480ac64edd3c0ec7 /examples/server/utils.hpp | |
parent | cbf75894d256f1861f6409565db599365de3d4b8 (diff) |
change default temperature of OAI compat API from 0 to 1 (#7226)
* change default temperature of OAI compat API from 0 to 1
* make tests explicitly send temperature to OAI API
Diffstat (limited to 'examples/server/utils.hpp')
-rw-r--r-- | examples/server/utils.hpp | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index d872b63f..d8a2286e 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -371,7 +371,7 @@ static json oaicompat_completion_params_parse(
     llama_params["presence_penalty"] = json_value(body, "presence_penalty", 0.0);
     llama_params["seed"] = json_value(body, "seed", LLAMA_DEFAULT_SEED);
     llama_params["stream"] = json_value(body, "stream", false);
-    llama_params["temperature"] = json_value(body, "temperature", 0.0);
+    llama_params["temperature"] = json_value(body, "temperature", 1.0);
     llama_params["top_p"] = json_value(body, "top_p", 1.0);

     // Apply chat template to the list of messages