author    Srihari-mcw <96763064+Srihari-mcw@users.noreply.github.com>    2024-05-19 19:18:39 -0700
committer GitHub <noreply@github.com>    2024-05-20 12:18:39 +1000
commit    33c8d50accd6dca73c9c4af00a05e24209c160fe (patch)
tree      926d8b9ad683420872afb234f5cd94dba3a3c500 /llama.cpp
parent    d359f30921a9f62a0fd299c412ff3f270286fea6 (diff)
Add provisions for Windows support for BF16 code, including a CMake provision for enabling AVX512_BF16 (#7258)
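The "CMake provision" mentioned above is presumably a new build option gating the AVX512_BF16 code paths. Assuming the option is named LLAMA_AVX512_BF16 (the option name itself is not visible in the hunk below), enabling it at configure time might look like:

    cmake -B build -DLLAMA_AVX512_BF16=ON
    cmake --build build --config Release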
Diffstat (limited to 'llama.cpp')
-rw-r--r--  llama.cpp  1
1 file changed, 1 insertion, 0 deletions
diff --git a/llama.cpp b/llama.cpp
index 102bc202..ca3e9fcc 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -18074,6 +18074,7 @@ const char * llama_print_system_info(void) {
s += "AVX512 = " + std::to_string(ggml_cpu_has_avx512()) + " | ";
s += "AVX512_VBMI = " + std::to_string(ggml_cpu_has_avx512_vbmi()) + " | ";
s += "AVX512_VNNI = " + std::to_string(ggml_cpu_has_avx512_vnni()) + " | ";
+ s += "AVX512_BF16 = " + std::to_string(ggml_cpu_has_avx512_bf16()) + " | ";
s += "FMA = " + std::to_string(ggml_cpu_has_fma()) + " | ";
s += "NEON = " + std::to_string(ggml_cpu_has_neon()) + " | ";
s += "ARM_FMA = " + std::to_string(ggml_cpu_has_arm_fma()) + " | ";