From 00c63907931bb08a0ed2b7e38cf44dd290143cb9 Mon Sep 17 00:00:00 2001
From: Justine Tunney
Date: Sat, 25 May 2024 05:04:03 -0400
Subject: main : don't print special tokens with --grammar (#6923)

* main : don't print special tokens with --grammar

The CLI interface was recently changed to print special control tokens
like the stop message one. This token shouldn't be printed if the
grammar flag was passed, unless the grammar specifies it, because that
breaks shell-scriptability.

* main: use separate stream for control characters

* main: use dprintf and add --ctrl-token-no-out and --ctrl-token-fd-out

* main: dprintf isn't part of the IEEE POSIX standard. Just use write().

* main: remove --ctrl-token-fd-out in favor of fcntl() based detection

* common.cpp: accidentally removed --interactive-first

* main: only merge stdout and control token output if not in conversation or grammar mode

* main: rejig control token descriptor handling

* main: must check pipe status at the very top of the program

* main: renamed --ctrl-token-no-out to --no-special and other refactoring

* main: refactor ctrl_token_no_out --> no_special

* llama: rename llama_token_is_control_token() to llama_token_is_control()

* main: remove special token file descriptor feature (#5)

---------

Co-authored-by: Brian
---
 llama.cpp | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/llama.cpp b/llama.cpp
index 85cb3140..989d27b9 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -17861,6 +17861,10 @@ bool llama_token_is_eog(const struct llama_model * model, llama_token token) {
     );
 }
 
+bool llama_token_is_control(const struct llama_model * model, llama_token token) {
+    return llama_is_control_token(model->vocab, token);
+}
+
 llama_token llama_token_bos(const struct llama_model * model) {
     return model->vocab.special_bos_id;
 }
--
cgit v1.2.3
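
Editor's note: below is a minimal sketch, outside the patch itself, of how a CLI front end such as examples/main might consume the new llama_token_is_control() API to honor the --no-special behavior described in the commit message. The maybe_print_token() helper, its parameters, and the fwrite-based output path are illustrative assumptions, not code from this PR; only llama_token_is_control() is introduced by the commit.

    #include <cstdio>
    #include <string>

    #include "llama.h"

    // Sketch under stated assumptions: skip control tokens (e.g. end-of-turn
    // markers) when the user asked for plain, shell-scriptable output.
    // Only llama_token_is_control() comes from this patch; the helper name,
    // parameters, and `piece` (the detokenized text of `token`) are assumed
    // stand-ins for the printing code in examples/main.
    static void maybe_print_token(const struct llama_model * model,
                                  llama_token token,
                                  const std::string & piece,
                                  bool no_special) {
        if (no_special && llama_token_is_control(model, token)) {
            return; // suppress special/control tokens
        }
        fwrite(piece.data(), 1, piece.size(), stdout);
        fflush(stdout);
    }

Filtering at the print site keeps the sampling loop untouched, which is consistent with the PR's final shape after the separate control-token file descriptor feature was dropped in favor of the --no-special flag.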