chore(backend): minor formatting

Morgan Funtowicz 2024-10-23 22:11:58 +02:00
parent 37faeb34b2
commit f9c248657d
2 changed files with 1 addition and 2 deletions

View File

@@ -15,10 +15,10 @@
#include "backend.hpp"
namespace huggingface::tgi::backends::llama {
    std::expected<std::unique_ptr<TgiLlamaCppBackend>, TgiLlamaCppBackendError>
    CreateLlamaCppBackend(const std::filesystem::path& modelPath) {
        SPDLOG_DEBUG(FMT_STRING("Loading model from {}"), modelPath);
        llama_backend_init();
        llama_numa_init(ggml_numa_strategy::GGML_NUMA_STRATEGY_NUMACTL);
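The factory above returns a std::expected<std::unique_ptr<TgiLlamaCppBackend>, TgiLlamaCppBackendError> (C++23), so callers branch on whether a value or an error came back. A minimal usage sketch, assuming only the declarations visible in this commit; the model path and the error handling are placeholders, not part of the change:

    #include <filesystem>
    #include <utility>
    #include "backend.hpp"  // assumed to declare CreateLlamaCppBackend and TgiLlamaCppBackendError

    int main() {
        // Hypothetical model path, for illustration only
        const std::filesystem::path modelPath{"/models/llama-3.1-8b.gguf"};

        auto maybeBackend = huggingface::tgi::backends::llama::CreateLlamaCppBackend(modelPath);
        if (!maybeBackend.has_value()) {
            // maybeBackend.error() is a TgiLlamaCppBackendError,
            // e.g. MODEL_FILE_DOESNT_EXIST when the path cannot be resolved
            return 1;
        }

        // Take ownership of the backend and drive generation from here
        auto backend = std::move(maybeBackend.value());
        return 0;
    }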

View File

@@ -17,7 +17,6 @@ namespace huggingface::tgi::backends::llama {
        MODEL_FILE_DOESNT_EXIST = 1
    };
    class TgiLlamaCppBackend {
        using TokenId = llama_token;
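Pieced together from the identifiers visible across the two hunks, the header this second hunk comes from declares roughly the following. Everything beyond what the diff shows (the enum's other values, whether it is scoped, the remaining class members) is an assumption:

    namespace huggingface::tgi::backends::llama {
        enum TgiLlamaCppBackendError {
            // other values, if any, are not visible in this diff
            MODEL_FILE_DOESNT_EXIST = 1
        };

        class TgiLlamaCppBackend {
            // llama_token comes from llama.h, presumably pulled in by this header
            using TokenId = llama_token;
            // remaining members are outside the shown hunk
        };
    }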