allow converting huggingface::tokenizers error to TensorRtLlmBackendError

This commit is contained in:
Morgan Funtowicz 2024-07-10 13:56:57 +00:00
parent 40fe2ec0ff
commit 42748d5960
2 changed files with 4 additions and 1 deletion

View File

@ -4,6 +4,8 @@ use text_generation_router::server;
 #[derive(Debug, Error)]
 pub enum TensorRtLlmBackendError {
+    #[error("Tokenizer error: {0}")]
+    Tokenizer(String),
     #[error("Argument validation error: {0}")]
     ArgumentValidation(String),
     #[error("WebServer error: {0}")]

View File

@ -122,7 +122,8 @@ async fn main() -> Result<(), TensorRtLlmBackendError> {
         user_agent: HashMap::new(),
         auth_token,
     }),
-)?;
+).map_err(|e| TensorRtLlmBackendError::Tokenizer(e.to_string()))?;
 let backend = TrtLLmBackend::new(tokenizer, model_id)?;
 server::run(
     backend,