Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-09-11 12:24:53 +00:00)
fix: clippy
commit 2e067fabd3
parent a8ba2542d8
@@ -1824,7 +1824,7 @@ pub async fn run(
     let tokenizer: Result<Tokenizer, WebServerError> = {
         use pyo3::prelude::*;
-        let res = Python::with_gil(|py| -> PyResult<()> {
+        Python::with_gil(|py| -> PyResult<()> {
             py_resolve_tokenizer(py, &tokenizer_name, revision.as_deref(), trust_remote_code)?;
             Ok(())
         })
@@ -1834,12 +1834,8 @@ pub async fn run(
         .or_else(|err| {
             let out = legacy_tokenizer_handle(config_filename.as_ref());
             out.ok_or(err)
-        }).map_err(|_|WebServerError::Tokenizer(
-            "Unable to load tokenizer.".to_string(),
-        ));
-        if res.is_err(){
-            return res
-        }
+        })
+        .map_err(|_| WebServerError::Tokenizer("Unable to load tokenizer.".to_string()))?;
         let filename = "out/tokenizer.json";
         if let Ok(tok) = tokenizers::Tokenizer::from_file(filename) {
             Ok(Tokenizer::Rust(tok))
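The change is the usual clippy-driven cleanup: instead of binding the Result of Python::with_gil to res and returning early when res.is_err(), the error is converted with map_err and propagated with the ? operator. A minimal standalone sketch of that pattern, with illustrative names that are not from the repository:

    // Minimal sketch of the clippy-suggested refactor (illustrative only, not repo code):
    // propagate the error with `?` instead of checking `is_err()` and returning early.
    fn load(path: &str) -> Result<String, String> {
        // Before:
        //     let res = std::fs::read_to_string(path).map_err(|e| e.to_string());
        //     if res.is_err() { return res; }
        // After: convert and propagate the error in one step.
        let contents = std::fs::read_to_string(path).map_err(|e| e.to_string())?;
        Ok(contents)
    }

    fn main() {
        match load("Cargo.toml") {
            Ok(text) => println!("read {} bytes", text.len()),
            Err(err) => eprintln!("failed to read file: {err}"),
        }
    }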