llama : deprecate llama_kv_self_ API (#14030)

* llama : deprecate llama_kv_self_ API

ggml-ci

* llama : allow llama_memory_(nullptr)

ggml-ci

* memory : add flag for optional data clear in llama_memory_clear

ggml-ci
Georgi Gerganov 2025-06-06 14:11:15 +03:00 committed by GitHub
parent 487a5e0401
commit 745aa5319b
34 changed files with 206 additions and 127 deletions

@@ -43,7 +43,7 @@ public:
     bool get_can_shift() const override;
-    void clear() override;
+    void clear(bool data) override;
     bool seq_rm (llama_seq_id seq_id, llama_pos p0, llama_pos p1) override;
     void seq_cp (llama_seq_id seq_id_src, llama_seq_id seq_id_dst, llama_pos p0, llama_pos p1) override;
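For downstream callers, here is a minimal migration sketch. It assumes the memory handle is obtained via llama_get_memory(ctx) and that llama_memory_clear() takes the bool data flag described in the commit message; treat the names and signatures as illustrative and check them against llama.h.

#include "llama.h"

// Illustrative only: how a caller might move from the deprecated
// llama_kv_self_ calls to the llama_memory_ API introduced by this commit.
static void reset_memory(llama_context * ctx) {
    // Assumption: llama_get_memory() returns the context's llama_memory_t handle.
    llama_memory_t mem = llama_get_memory(ctx);

    // Before (deprecated): llama_kv_self_clear(ctx);
    // After: the new flag selects between resetting only the cache metadata
    // (data == false) and also clearing the data buffers (data == true).
    llama_memory_clear(mem, /*data=*/ true);

    // Per the commit message, llama_memory_(nullptr) is allowed, so the call
    // stays safe for a context created without a KV cache.
}

Moving clear and the seq_* operations onto an explicit memory handle decouples them from the context and mirrors the member functions shown in the hunk above.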