Mirror of https://github.com/huggingface/text-generation-inference.git, synced 2025-09-11 12:24:53 +00:00
fix: add grammar_support to validation tests
parent 8f1651f16d
commit 95e577b971
@@ -482,6 +482,7 @@ mod tests {
         let max_input_length = 5;
         let max_total_tokens = 6;
         let workers = 1;
+        let grammar_support = false;
         let validation = Validation::new(
             workers,
             tokenizer,
@@ -490,6 +491,7 @@ mod tests {
             max_top_n_tokens,
             max_input_length,
             max_total_tokens,
+            grammar_support,
         );

         let max_new_tokens = 10;
@@ -510,6 +512,7 @@ mod tests {
         let max_top_n_tokens = 4;
         let max_input_length = 5;
         let max_total_tokens = 6;
+        let grammar_support = false;
         let workers = 1;
         let validation = Validation::new(
             workers,
@@ -519,6 +522,7 @@ mod tests {
             max_top_n_tokens,
             max_input_length,
             max_total_tokens,
+            grammar_support,
         );

         let max_new_tokens = 10;
@@ -540,6 +544,7 @@ mod tests {
         let max_input_length = 5;
         let max_total_tokens = 6;
         let workers = 1;
+        let grammar_support = false;
         let validation = Validation::new(
             workers,
             tokenizer,
@@ -548,6 +553,7 @@ mod tests {
             max_top_n_tokens,
             max_input_length,
             max_total_tokens,
+            grammar_support,
         );
         match validation
             .validate(GenerateRequest {
@@ -574,6 +580,7 @@ mod tests {
         let max_input_length = 5;
         let max_total_tokens = 106;
         let workers = 1;
+        let grammar_support = false;
         let validation = Validation::new(
             workers,
             tokenizer,
@@ -582,6 +589,7 @@ mod tests {
             max_top_n_tokens,
             max_input_length,
             max_total_tokens,
+            grammar_support,
         );
         match validation
             .validate(GenerateRequest {
@@ -637,6 +645,7 @@ mod tests {
         let max_input_length = 5;
         let max_total_tokens = 106;
         let workers = 1;
+        let grammar_support = false;
         let validation = Validation::new(
             workers,
             tokenizer,
@@ -645,6 +654,7 @@ mod tests {
             max_top_n_tokens,
             max_input_length,
             max_total_tokens,
+            grammar_support,
         );
         match validation
             .validate(GenerateRequest {
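Every affected test follows the same two-line pattern: declare the new flag next to the other limits, then pass it as the trailing argument to Validation::new. Below is a minimal, self-contained sketch of that pattern using a hypothetical SimpleValidation type for illustration only; it is not the real TGI Validation API, whose remaining constructor parameters (e.g. the tokenizer) are elided by the hunk boundaries above.

// Hypothetical stand-in for the pattern applied in the diff above: a boolean
// flag is appended to the constructor, so every test that builds the value
// must declare and pass it. NOT the actual TGI `Validation` type.
#[allow(dead_code)]
struct SimpleValidation {
    workers: usize,
    max_input_length: usize,
    max_total_tokens: usize,
    grammar_support: bool,
}

impl SimpleValidation {
    #[allow(dead_code)]
    fn new(
        workers: usize,
        max_input_length: usize,
        max_total_tokens: usize,
        grammar_support: bool,
    ) -> Self {
        Self {
            workers,
            max_input_length,
            max_total_tokens,
            grammar_support,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn grammar_support_flag_is_threaded_through() {
        // Mirrors the diff: declare the flag alongside the other limits,
        // then pass it as the trailing constructor argument.
        let max_input_length = 5;
        let max_total_tokens = 6;
        let workers = 1;
        let grammar_support = false;
        let validation = SimpleValidation::new(
            workers,
            max_input_length,
            max_total_tokens,
            grammar_support,
        );
        assert!(!validation.grammar_support);
    }
}

Setting the flag to false in every test keeps the existing assertions unchanged; only the constructor arity changes.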