From 52208f5b78fd8cc31d01f440b5f5e250896c1e64 Mon Sep 17 00:00:00 2001
From: Morgan Funtowicz
Date: Mon, 4 Nov 2024 23:24:50 +0100
Subject: [PATCH] misc(backend): decrease log verbosity in callback

---
 backends/llamacpp/src/backend.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backends/llamacpp/src/backend.rs b/backends/llamacpp/src/backend.rs
index 08fac675..62b4743d 100644
--- a/backends/llamacpp/src/backend.rs
+++ b/backends/llamacpp/src/backend.rs
@@ -17,7 +17,7 @@ use tokenizers::Tokenizer;
 use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};
 use tokio::time::Instant;
 use tokio_stream::wrappers::UnboundedReceiverStream;
-use tracing::{error, info};
+use tracing::{debug, error, info};
 
 type InferResult = Result;
 
@@ -113,7 +113,7 @@ fn llama_generate_callback(
     is_final: bool,
     n_generated_tokens: usize,
 ) -> bool {
-    info!("Generated token: {new_token_id} -> logits={new_token_logit}, is_final={is_final} ({n_generated_tokens})");
+    debug!("Generated token: {new_token_id} -> logits={new_token_logit}, is_final={is_final} ({n_generated_tokens})");
     let ctx = unsafe { &mut *ctx };