diff options
Diffstat (limited to 'llama.cpp')
-rw-r--r-- llama.cpp | 5 +++++
1 file changed, 5 insertions(+), 0 deletions(-)
@@ -16221,6 +16221,11 @@ struct llama_context * llama_new_context_with_model(
         params.flash_attn = false;
     }

+    if (params.type_v != GGML_TYPE_F16 && !params.flash_attn) {
+        LLAMA_LOG_ERROR("%s: V cache quantization requires flash_attn\n", __func__);
+        return nullptr;
+    }
+
     llama_context * ctx = new llama_context(*model);

     const auto & hparams = model->hparams;