From dbc5b7a0f7defc463ca43ec6eeae43e0a1f2182b Mon Sep 17 00:00:00 2001
From: Morgan Funtowicz
Date: Sat, 26 Oct 2024 22:24:05 +0200
Subject: [PATCH] misc(offline): link correctly

---
 backends/llamacpp/offline/main.cpp | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/backends/llamacpp/offline/main.cpp b/backends/llamacpp/offline/main.cpp
index 56eb88c54..d8121d3df 100644
--- a/backends/llamacpp/offline/main.cpp
+++ b/backends/llamacpp/offline/main.cpp
@@ -2,7 +2,6 @@
 // Created by mfuntowicz on 10/3/24.
 //
-#include
 #include
 #include
 #include
 
@@ -12,7 +11,7 @@
 
 using namespace huggingface::tgi::backends::llamacpp;
 
-int main(int argc, char** argv) {
+int main(int argc, char **argv) {
     if (argc < 2) {
         fmt::print("No model folder provider");
         return 1;
@@ -23,15 +22,16 @@ int main(int argc, char** argv) {
     const auto prompt = "My name is Morgan";
 
     const auto modelPath = absolute(std::filesystem::path(argv[1]));
-    if (auto maybeBackend = CreateLlamaCppBackend(modelPath); maybeBackend.has_value()) {
+    if (auto maybeBackend = TgiLlamaCppBackend::FromGGUF(modelPath); maybeBackend.has_value()) {
         // Retrieve the backend
-        const auto& backend = *maybeBackend;
+        auto [model, context] = *maybeBackend;
+        auto backend = TgiLlamaCppBackend(model, context);
 
         // Generate
-        const auto promptTokens = backend->Tokenize(prompt);
-        const auto out = backend->Generate(promptTokens, 30, 1.0, 2.0, 0.0, 32);
+        const auto promptTokens = backend.Tokenize(prompt);
+        const auto out = backend.Generate(promptTokens, 30, 1.0, 2.0, 0.0, 32);
 
-        if(out.has_value())
+        if (out.has_value())
            fmt::print(FMT_STRING("Generated: {}"), *out);
        else {
            const auto err = out.error();