mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-09-10 20:04:52 +00:00)

fmt
Commit 47e0620ab6 (parent d20576ae0c)
@@ -128,7 +128,7 @@ impl Client {
                 watermark: true,
             }),
             stopping_parameters: Some(StoppingCriteriaParameters {
-                max_new_tokens: max_total_tokens-truncate,
+                max_new_tokens: max_total_tokens - truncate,
                 stop_sequences: vec![],
                 ignore_eos_token: true,
             }),
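This hunk is a pure rustfmt fix: spacing around the subtraction in `max_new_tokens`. A minimal runnable sketch of the construction, with the struct fields inferred from the diff context (the real type is protobuf-generated; the field types here are assumptions):

// Hypothetical mirror of the generated protobuf type; the field names follow
// the diff, but the exact field types are assumptions for illustration.
#[derive(Debug)]
struct StoppingCriteriaParameters {
    max_new_tokens: u32,
    stop_sequences: Vec<String>,
    ignore_eos_token: bool,
}

fn stopping_params(max_total_tokens: u32, truncate: u32) -> StoppingCriteriaParameters {
    StoppingCriteriaParameters {
        // rustfmt normalizes `max_total_tokens-truncate` to the spaced form
        max_new_tokens: max_total_tokens - truncate,
        stop_sequences: vec![],
        ignore_eos_token: true,
    }
}

fn main() {
    println!("{:?}", stopping_params(2048, 512));
}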
@@ -100,7 +100,9 @@ impl ShardedClient {
         let futures: Vec<_> = self
             .clients
             .iter_mut()
-            .map(|client| Box::pin(client.warmup(max_input_length, max_prefill_tokens, max_total_tokens)))
+            .map(|client| {
+                Box::pin(client.warmup(max_input_length, max_prefill_tokens, max_total_tokens))
+            })
             .collect();
         // Take the minimum value
         let results = join_all(futures)
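The reformatted closure wraps the same pinned warmup future in a block body. A minimal sketch of the fan-out pattern in this hunk, assuming the tokio and futures crates; `Client` and the single-parameter `warmup` signature are simplified stand-ins for the real gRPC client:

// Pin each shard's warmup future, await them all with join_all, then reduce
// to the minimum value across shards, failing if any shard failed.
use futures::future::join_all;

struct Client(u32);

impl Client {
    // Stand-in for the real warmup RPC, which takes several limits.
    async fn warmup(&mut self, max_input_length: u32) -> Result<u32, String> {
        Ok(self.0 + max_input_length)
    }
}

#[tokio::main]
async fn main() {
    let mut clients = vec![Client(1), Client(2)];
    let futures: Vec<_> = clients
        .iter_mut()
        .map(|client| Box::pin(client.warmup(128)))
        .collect();
    // Collect Vec<Result<_, _>> into Result<Vec<_>, _>, then take the minimum
    let results: Result<Vec<u32>, String> = join_all(futures).await.into_iter().collect();
    let min = results.map(|values| values.into_iter().min());
    println!("{:?}", min);
}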
@@ -122,7 +122,7 @@ impl Validation {
             if let Some(truncate) = truncate {
                 self.max_total_tokens.saturating_sub(truncate) as u32
             } else {
-                return Err(ValidationError::UnsetMaxNewTokens)
+                return Err(ValidationError::UnsetMaxNewTokens);
             }
         };
         let input_length = truncate.unwrap_or(self.max_input_length);
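The added semicolon is again a rustfmt normalization of the early return. A minimal sketch of this validation step under assumed types (`Validation` and `ValidationError::UnsetMaxNewTokens` follow the diff; the field types and the method wrapper are illustrative):

// Derive max_new_tokens from max_total_tokens minus truncate, erroring out
// when truncate is unset.
#[derive(Debug)]
enum ValidationError {
    UnsetMaxNewTokens,
}

struct Validation {
    max_total_tokens: usize,
    max_input_length: usize,
}

impl Validation {
    fn max_new_tokens(&self, truncate: Option<usize>) -> Result<u32, ValidationError> {
        let max_new_tokens = if let Some(truncate) = truncate {
            // saturating_sub avoids underflow when truncate > max_total_tokens
            self.max_total_tokens.saturating_sub(truncate) as u32
        } else {
            return Err(ValidationError::UnsetMaxNewTokens);
        };
        let _input_length = truncate.unwrap_or(self.max_input_length);
        Ok(max_new_tokens)
    }
}

fn main() {
    let v = Validation { max_total_tokens: 2048, max_input_length: 1024 };
    println!("{:?}", v.max_new_tokens(Some(512)));
    println!("{:?}", v.max_new_tokens(None));
}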