Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-09-12 12:54:52 +00:00)

Commit 30d86ed95a: only tokenizer class instead of whole struct
Parent: f965deeee7
@@ -33,15 +33,7 @@ As of release 2.1.2 this is an example of the data collected:
     },
     "ngrok": false,
     "revision": null,
-    "tokenizer_config": {
-        "add_bos_token": null,
-        "add_eos_token": null,
-        "bos_token": "<s>",
-        "chat_template": null,
-        "completion_template": null,
-        "eos_token": "</s>",
-        "tokenizer_class": "BloomTokenizerFast"
-    },
+    "tokenizer_class": "BloomTokenizerFast",
     "validation_workers": 2,
     "waiting_served_ratio": 1.2,
     "docker_label": "latest",
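The net effect on the documented payload: the nested "tokenizer_config" object disappears and only the class name is reported at the top level. A minimal sketch of the new shape using serde_json (values copied from the example above; the surrounding fields shown here are only a small illustrative slice, not the full payload):

    use serde_json::json;

    fn main() {
        // After this change the collected data carries only the tokenizer class
        // name instead of the whole tokenizer_config struct.
        let collected = json!({
            "ngrok": false,
            "revision": null,
            "tokenizer_class": "BloomTokenizerFast",
            "validation_workers": 2,
            "waiting_served_ratio": 1.2,
            "docker_label": "latest"
        });
        println!("{}", serde_json::to_string_pretty(&collected).unwrap());
    }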
@@ -327,6 +327,7 @@ async fn main() -> Result<(), RouterError> {
             tracing::warn!("Could not find tokenizer config locally and no API specified");
             HubTokenizerConfig::default()
         });
+    let tokenizer_class = tokenizer_config.tokenizer_class.clone();
 
     let tokenizer: Option<Tokenizer> = tokenizer_filename.and_then(|filename| {
         let mut tokenizer = Tokenizer::from_file(filename).ok();
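A minimal, self-contained sketch of the extraction step added above, assuming HubTokenizerConfig exposes a tokenizer_class: Option<String> field (consistent with the tokenizer_config.tokenizer_class.clone() call in the hunk). The struct here is a simplified stand-in, not the router's real type:

    // Simplified stand-in for the router's HubTokenizerConfig (illustrative only).
    struct HubTokenizerConfig {
        tokenizer_class: Option<String>,
    }

    fn main() {
        let tokenizer_config = HubTokenizerConfig {
            tokenizer_class: Some("BloomTokenizerFast".to_string()),
        };
        // Clone only the class name; the full config struct no longer has to be
        // forwarded into the usage-stats arguments.
        let tokenizer_class = tokenizer_config.tokenizer_class.clone();
        assert_eq!(tokenizer_class.as_deref(), Some("BloomTokenizerFast"));
    }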
@@ -387,7 +388,7 @@ async fn main() -> Result<(), RouterError> {
     let user_agent = if !disable_usage_stats && is_docker {
         let reducded_args = usage_stats::Args::new(
             config.clone(),
-            tokenizer_config.clone(),
+            tokenizer_class,
             max_concurrent_requests,
             max_best_of,
             max_stop_sequences,
@@ -60,7 +60,7 @@ impl UsageStatsEvent {
 #[derive(Debug, Clone, Serialize)]
 pub struct Args {
     model_config: Option<Config>,
-    tokenizer_config: HubTokenizerConfig,
+    tokenizer_config: Option<String>,
     max_concurrent_requests: usize,
     max_best_of: usize,
     max_stop_sequences: usize,
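Because Args derives Serialize, narrowing this field from HubTokenizerConfig to Option<String> is what flattens the reported JSON. A minimal sketch of that effect with a cut-down struct (only two fields are kept; everything else is illustrative), using serde and serde_json:

    use serde::Serialize;

    // Cut-down stand-in for the usage-stats Args struct after this change.
    #[derive(Debug, Clone, Serialize)]
    struct Args {
        // Now holds just the tokenizer class name, e.g. "BloomTokenizerFast".
        tokenizer_config: Option<String>,
        max_concurrent_requests: usize,
    }

    fn main() {
        let args = Args {
            tokenizer_config: Some("BloomTokenizerFast".to_string()),
            max_concurrent_requests: 128,
        };
        // Serializes to a flat string field rather than a nested object.
        println!("{}", serde_json::to_string_pretty(&args).unwrap());
    }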
@@ -86,7 +86,7 @@ pub struct Args {
 impl Args {
     pub fn new(
         model_config: Option<Config>,
-        tokenizer_config: HubTokenizerConfig,
+        tokenizer_config: Option<String>,
         max_concurrent_requests: usize,
         max_best_of: usize,
         max_stop_sequences: usize,
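And a sketch of how the constructor and its call site line up after the change: Args::new takes the optional class name positionally, so the caller in main() passes the tokenizer_class extracted earlier instead of tokenizer_config.clone(). Field and parameter names other than those shown in the hunks are illustrative:

    // Cut-down stand-in for the usage-stats Args struct and its constructor.
    #[derive(Debug)]
    struct Args {
        tokenizer_config: Option<String>,
        max_concurrent_requests: usize,
    }

    impl Args {
        // Mirrors the narrowed parameter above: an optional class name instead
        // of a full HubTokenizerConfig.
        fn new(tokenizer_config: Option<String>, max_concurrent_requests: usize) -> Self {
            Self {
                tokenizer_config,
                max_concurrent_requests,
            }
        }
    }

    fn main() {
        // Call site as in main(): pass the extracted class name, not the whole config.
        let tokenizer_class = Some("BloomTokenizerFast".to_string());
        let args = Args::new(tokenizer_class, 128);
        println!("{:?}", args);
    }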