Diffstat (limited to 'include/llama.h')
-rw-r--r-- | include/llama.h | 8
1 file changed, 8 insertions, 0 deletions
diff --git a/include/llama.h b/include/llama.h
index 607a590d..89526276 100644
--- a/include/llama.h
+++ b/include/llama.h
@@ -1208,6 +1208,14 @@ extern "C" {
             llama_token_data_array * candidates,
                            float   temp);
 
+    /// @details XTC sampler as described in https://github.com/oobabooga/text-generation-webui/pull/6335
+    LLAMA_API void llama_sample_xtc(
+            struct llama_context * ctx,
+          llama_token_data_array * candidates_p,
+                           float   probability,
+                           float   threshold,
+                          size_t   min_keep);
+
     /// @details Mirostat 1.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.
     /// @param candidates A vector of `llama_token_data` containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.
     /// @param tau  The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.
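
For context, a minimal sketch of how the new entry point could be called from the existing llama_sample_* pipeline, assuming the pre-refactor sampling API (llama_get_logits_ith, llama_n_vocab, llama_sample_token) that surrounds this declaration. The parameter values (probability 0.5, threshold 0.1, min_keep 1) and the helper name sample_with_xtc are illustrative assumptions, not part of this patch.

// Sketch only: wrap the last token's logits in a llama_token_data_array,
// run the XTC sampler over it, then pick a token. Values are assumed
// for illustration and are not taken from this commit.
#include "llama.h"

#include <stdlib.h>

static llama_token sample_with_xtc(struct llama_context * ctx, int32_t idx) {
    const struct llama_model * model = llama_get_model(ctx);
    const int n_vocab = llama_n_vocab(model);

    float * logits = llama_get_logits_ith(ctx, idx);

    // build the candidate list expected by the llama_sample_* functions
    llama_token_data * data = malloc(n_vocab * sizeof(llama_token_data));
    for (llama_token id = 0; id < n_vocab; ++id) {
        data[id] = (llama_token_data){ id, logits[id], 0.0f };
    }
    llama_token_data_array candidates = { data, (size_t) n_vocab, false };

    // XTC ("exclude top choices"): with the given probability, remove all but
    // the least likely of the tokens whose probability exceeds the threshold,
    // keeping at least min_keep candidates
    llama_sample_xtc(ctx, &candidates, 0.5f, 0.1f, 1);

    const llama_token tok = llama_sample_token(ctx, &candidates);
    free(data);
    return tok;
}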