From 2b3389677a833cee0880226533a1768b1a9508d2 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 5 Jun 2024 11:29:20 +0300
Subject: ggml : refactor rope norm/neox (#7634)

* ggml : unify rope norm/neox (CPU)

* ggml : fix compile warning

* ggml : remove GLM rope mode

ggml-ci

* metal : better rope implementation

ggml-ci

* cuda : better rope implementation

ggml-ci

* naming : n_orig_ctx -> n_ctx_orig

ggml-ci

* dev : add reminders to update backends

ggml-ci

* vulkan : fix ggml_rope_ext() usage

* cuda : fix array size + indents

ggml-ci
---
 examples/finetune/finetune.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'examples/finetune/finetune.cpp')

diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp
index 22425730..71a4333e 100644
--- a/examples/finetune/finetune.cpp
+++ b/examples/finetune/finetune.cpp
@@ -564,7 +564,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs(
         const int rope_mode = 0;
 
         return ggml_rope_ext(ctx,
-            t, KQ_pos, nullptr, n_rot, rope_mode, n_ctx, 0,
+            t, KQ_pos, nullptr, n_rot, rope_mode, n_ctx,
             rope_freq_base, rope_freq_scale, 0.0f, 1.0f, 0.0f, 0.0f
         );
     };
--
cgit v1.2.3
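
Not part of the patch: a minimal sketch of what a caller looks like after this refactor, assuming the post-#7634 ggml_rope_ext() declaration in ggml.h (ctx, a, b, c, n_dims, mode, n_ctx_orig, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow). With the GLM rope mode removed, the former (n_ctx, n_orig_ctx) argument pair collapses into the single n_ctx_orig slot, which is why the diff above drops one argument. The helper name and the literal frequency values here are illustrative, not from the commit.

    // rope_sketch.cpp -- assumed post-refactor API, for illustration only
    #include "ggml.h"

    static struct ggml_tensor * apply_rope(
            struct ggml_context * ctx,
            struct ggml_tensor  * t,     // activations to rotate
            struct ggml_tensor  * pos,   // per-token positions (I32)
            int n_rot, int n_ctx) {
        const int rope_mode = 0;         // 0 = "norm" rope; GLM mode no longer exists

        // One argument fewer than before: n_ctx is passed straight through
        // as n_ctx_orig instead of alongside a separate n_orig_ctx value.
        return ggml_rope_ext(ctx,
            t, pos, nullptr,             // no per-dimension frequency factors
            n_rot, rope_mode, n_ctx,
            10000.0f, 1.0f,              // freq_base, freq_scale (illustrative)
            0.0f, 1.0f, 0.0f, 0.0f);     // ext_factor, attn_factor, beta_fast, beta_slow
    }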