From 29fa60ec3eaa2cc1663d211d83bc8c3b121e9dd7 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Fri, 26 Jan 2024 11:36:41 +0000
Subject: [PATCH] Trying to fix that flaky test.

---
 router/src/lib.rs | 22 ++++------------------
 1 file changed, 4 insertions(+), 18 deletions(-)

diff --git a/router/src/lib.rs b/router/src/lib.rs
index 2bfbbacd..6c16c4b3 100644
--- a/router/src/lib.rs
+++ b/router/src/lib.rs
@@ -548,26 +548,12 @@ pub(crate) struct ErrorResponse {
 
 #[cfg(test)]
 mod tests {
-    use std::io::Write;
     use tokenizers::Tokenizer;
 
     pub(crate) async fn get_tokenizer() -> Tokenizer {
-        let filename = std::path::Path::new("tokenizer.json");
-        if !filename.exists() {
-            let content = reqwest::get("https://huggingface.co/gpt2/raw/main/tokenizer.json")
-                .await
-                .unwrap()
-                .bytes()
-                .await
-                .unwrap();
-            let tmp_filename = "tokenizer.json.temp";
-            let mut file = std::fs::File::create(tmp_filename).unwrap();
-            file.write_all(&content).unwrap();
-            // Re-check if another process has written this file maybe.
-            if !filename.exists() {
-                std::fs::rename(tmp_filename, filename).unwrap()
-            }
-        }
-        Tokenizer::from_file("tokenizer.json").unwrap()
+        let api = hf_hub::api::sync::Api::new().unwrap();
+        let repo = api.model("gpt2".to_string());
+        let filename = repo.get("tokenizer.json").unwrap();
+        Tokenizer::from_file(filename).unwrap()
     }
 }