From 3e693197724c31d53a9b69018c2f1bd0b93ebab2 Mon Sep 17 00:00:00 2001
From: Daniel Bevenius
Date: Thu, 13 Feb 2025 07:07:51 +0100
Subject: [PATCH] llama : update llama_decode_internal ref [no ci] (#11840)

This commit updates the comment in llama-kv-cache.h to reflect the
change of the function name from llama_decode_internal to
llama_decode_impl.
---
 src/llama-kv-cache.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llama-kv-cache.h b/src/llama-kv-cache.h
index dca6f399..1ed688e3 100644
--- a/src/llama-kv-cache.h
+++ b/src/llama-kv-cache.h
@@ -37,7 +37,7 @@ struct llama_kv_cache {
     bool can_shift = false;
 
     // Note: The value of head isn't only used to optimize searching
-    // for a free KV slot. llama_decode_internal also uses it, so it
+    // for a free KV slot. llama_decode_impl also uses it, so it
     // cannot be freely changed after a slot has been allocated.
     uint32_t head = 0;
     uint32_t size = 0;
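
For context on the comment being touched: `head` serves both as a hint for where to start searching for a free KV slot and as a record of where the most recent batch was placed, which the decode path reads afterwards. The simplified C++ sketch below illustrates that dependency; the type and function names (`kv_cache_sketch`, `find_slot`) are illustrative assumptions, not llama.cpp's actual implementation.

    #include <cstdint>
    #include <vector>

    // Hypothetical, simplified KV-cache model: each cell is either free or
    // holds a token position. The real llama_kv_cache tracks far more state.
    struct kv_cache_sketch {
        std::vector<int32_t> cells; // -1 == free, otherwise the stored position
        uint32_t head = 0;          // search hint AND start of the last allocation
        uint32_t size = 0;

        explicit kv_cache_sketch(uint32_t n) : cells(n, -1), size(n) {}

        // Find n_tokens contiguous free cells, starting the scan at `head`.
        // Returns the start index, or -1 if no slot is available.
        int64_t find_slot(uint32_t n_tokens) {
            for (uint32_t attempt = 0; attempt < size; ++attempt) {
                const uint32_t start = (head + attempt) % size;
                if (start + n_tokens > size) {
                    continue; // this sketch does not wrap allocations around
                }
                bool is_free = true;
                for (uint32_t i = 0; i < n_tokens; ++i) {
                    if (cells[start + i] != -1) { is_free = false; break; }
                }
                if (is_free) {
                    // The decode-side code later reads `head` to know where the
                    // batch was placed, so it must not be reset between this
                    // call and that use -- the invariant the comment describes.
                    head = start;
                    return start;
                }
            }
            return -1;
        }
    };

In other words, treating `head` as a pure optimization and clearing it after allocation would break the caller (llama_decode_impl in the renamed code), which is exactly what the updated comment warns against.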