From e586ee42595500c53938e937b6b6ad5353ad76dc Mon Sep 17 00:00:00 2001
From: Benjamin Findley <39356821+Kartoffelsaft@users.noreply.github.com>
Date: Sun, 12 May 2024 19:40:08 -0700
Subject: change default temperature of OAI compat API from 0 to 1 (#7226)

* change default temperature of OAI compat API from 0 to 1

* make tests explicitly send temperature to OAI API
---
 examples/server/utils.hpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'examples/server/utils.hpp')

diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index d872b63f..d8a2286e 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -371,7 +371,7 @@ static json oaicompat_completion_params_parse(
     llama_params["presence_penalty"] = json_value(body, "presence_penalty", 0.0);
     llama_params["seed"] = json_value(body, "seed", LLAMA_DEFAULT_SEED);
     llama_params["stream"] = json_value(body, "stream", false);
-    llama_params["temperature"] = json_value(body, "temperature", 0.0);
+    llama_params["temperature"] = json_value(body, "temperature", 1.0);
     llama_params["top_p"] = json_value(body, "top_p", 1.0);
 
     // Apply chat template to the list of messages
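For context, the hunk above relies on a lookup-with-default helper: json_value(body, key, default) reads a field from the incoming OAI-compatible request body and falls back to the third argument when the field is missing, so requests that omit "temperature" now sample at 1.0 (OpenAI's documented default) instead of 0.0. The sketch below is an illustrative reimplementation using nlohmann::json; the helper name json_value comes from the diff, but this body is an assumption, not the project's actual definition in examples/server/utils.hpp.

// Illustrative sketch (assumption): a json_value-style helper that returns
// body[key] when the key is present and non-null, otherwise the supplied
// default. The real helper lives in examples/server/utils.hpp and may differ.
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

using json = nlohmann::json;

template <typename T>
static T json_value_sketch(const json & body, const std::string & key, const T & default_value) {
    if (body.contains(key) && !body.at(key).is_null()) {
        return body.at(key).get<T>();
    }
    return default_value;
}

int main() {
    // An OAI-compatible request body that omits "temperature", as many clients do.
    json body = json::parse(R"({"model": "llama", "stream": false})");

    // Before #7226 the fallback was 0.0 (effectively near-greedy sampling);
    // with this change an omitted field yields 1.0, matching OpenAI's default.
    double temperature = json_value_sketch(body, "temperature", 1.0);
    std::cout << "temperature = " << temperature << "\n"; // prints: temperature = 1
}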