Mirror of https://github.com/huggingface/text-generation-inference.git
Synced 2025-06-19 15:52:08 +00:00
Change const to usize
parent d002ab3eb1
commit b2acd1b15e
@@ -6,7 +6,7 @@ use thiserror::Error;
 use tokenizers::tokenizer::Tokenizer;
 use tokio::sync::{mpsc, oneshot};
 
-const MAX_MAX_NEW_TOKENS: u32 = 512;
+const MAX_MAX_NEW_TOKENS: usize = 512;
 const MAX_STOP_SEQUENCES: usize = 4;
 
 /// Validation
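
The hunk header above shows `use thiserror::Error;`, and the checks in the next hunk reference `ValidationError::TopK`, `ValidationError::MaxNewTokens(..)`, and `ValidationError::StopSequence(..)`. A minimal sketch of what such an error enum could look like follows; only the variant names come from this diff, the payload types and messages are assumptions. With MAX_MAX_NEW_TOKENS now a usize, it can be passed straight into a MaxNewTokens(usize) payload without a cast.

use thiserror::Error;

// Hypothetical shape of the validation error type: only the variant
// names appear in this diff; payload types and messages are assumptions.
#[derive(Debug, Error)]
pub enum ValidationError {
    #[error("top_k must be >= 0")]
    TopK,
    #[error("max_new_tokens must be <= {0}")]
    MaxNewTokens(usize),
    #[error("at most {0} stop sequences are allowed")]
    StopSequence(usize),
}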
@@ -112,8 +112,8 @@ fn validate(
     if request.parameters.top_k < 0 {
         return Err(ValidationError::TopK);
     }
-    if request.parameters.max_new_tokens > MAX_MAX_NEW_TOKENS {
-        return Err(ValidationError::MaxNewTokens(MAX_MAX_NEW_TOKENS as usize));
+    if request.parameters.max_new_tokens as usize > MAX_MAX_NEW_TOKENS {
+        return Err(ValidationError::MaxNewTokens(MAX_MAX_NEW_TOKENS));
     }
     if request.parameters.stop.len() > MAX_STOP_SEQUENCES {
         return Err(ValidationError::StopSequence(
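
Put together, the validation path touched by this commit reads roughly as sketched below. The GenerateParameters struct and its field types are assumptions (in particular, max_new_tokens presumably stays a u32 on the request side, which is why the cast moves onto the request value in the comparison, while the usize constant now flows into the error variant without a cast).

// Hypothetical, trimmed-down version of the checks in validate();
// everything outside the diffed lines (struct and field types) is assumed.
struct GenerateParameters {
    top_k: i32,
    max_new_tokens: u32,
    stop: Vec<String>,
}

const MAX_MAX_NEW_TOKENS: usize = 512;
const MAX_STOP_SEQUENCES: usize = 4;

fn check(parameters: &GenerateParameters) -> Result<(), ValidationError> {
    if parameters.top_k < 0 {
        return Err(ValidationError::TopK);
    }
    // The request field is still a u32, so it is widened to usize for the
    // comparison; the constant itself no longer needs an `as usize` cast
    // when handed to the error variant.
    if parameters.max_new_tokens as usize > MAX_MAX_NEW_TOKENS {
        return Err(ValidationError::MaxNewTokens(MAX_MAX_NEW_TOKENS));
    }
    if parameters.stop.len() > MAX_STOP_SEQUENCES {
        return Err(ValidationError::StopSequence(MAX_STOP_SEQUENCES));
    }
    Ok(())
}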