From 7e2890fe2cf14270e6e7ecd92500072b4655ab8c Mon Sep 17 00:00:00 2001
From: Morgan Funtowicz
Date: Mon, 11 Nov 2024 19:50:11 +0100
Subject: [PATCH] feat(backend): remove unused function

---
 backends/llamacpp/csrc/backend.cpp | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/backends/llamacpp/csrc/backend.cpp b/backends/llamacpp/csrc/backend.cpp
index 837f87ea0..66017fc55 100644
--- a/backends/llamacpp/csrc/backend.cpp
+++ b/backends/llamacpp/csrc/backend.cpp
@@ -14,19 +14,6 @@
 namespace huggingface::tgi::backends::llamacpp {
 
-    void llama_batch_fill_prompt(llama_batch &batch, std::span<const llama_token> input_tokens) {
-        for (auto i = 0; i < input_tokens.size(); ++i) {
-            batch.token[i] = input_tokens[i];
-            batch.pos[i] = i;
-            batch.n_seq_id[i] = 1;
-            batch.seq_id[i] = nullptr;
-            batch.logits[i] = false;
-            ++batch.n_tokens;
-        }
-
-        batch.logits[batch.n_tokens] = true;
-    }
-
     llama_sampler_ptr sampling_params_t::into_llama_sampler(const llama_model *model) const {
         auto *pSampler = llama_sampler_chain_init({.no_perf = false});