Mirror of https://github.com/huggingface/text-generation-inference.git, synced 2025-04-28 13:32:10 +00:00
Fix incorrect setting of max_new_tokens in warmup (#104)
Signed-off-by: Wang, Yi A <yi.a.wang@intel.com>
parent 7149ac30e6
commit 3d81a80577
@@ -175,7 +175,7 @@ impl Client {
                 watermark: false,
             }),
             stopping_parameters: Some(StoppingCriteriaParameters {
-                max_new_tokens: 10,
+                max_new_tokens: cmp::min(10, max_total_tokens - max_input_length),
                 stop_sequences: vec![],
                 ignore_eos_token: true,
             }),
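The change clamps the number of new tokens requested during warmup to the remaining token budget: before the fix, warmup always asked for 10 new tokens, even when max_input_length left less than 10 tokens of headroom under max_total_tokens. Below is a minimal sketch of the clamping logic, assuming u32 parameters and a hypothetical helper name; it is not the actual TGI client code, only the expression from the patched line wrapped in a standalone function.

use std::cmp;

// Hypothetical helper illustrating the fix: the warmup request must not ask
// for more new tokens than the remaining budget (max_total_tokens - max_input_length).
fn warmup_max_new_tokens(max_total_tokens: u32, max_input_length: u32) -> u32 {
    // Same expression as the patched line: clamp the fixed warmup value of 10
    // to whatever room is left after the input prompt.
    cmp::min(10, max_total_tokens - max_input_length)
}

fn main() {
    // Tight budget: only 2 tokens of headroom, so warmup requests 2, not 10.
    assert_eq!(warmup_max_new_tokens(1024, 1022), 2);
    // Plenty of headroom: the original warmup value of 10 is kept.
    assert_eq!(warmup_max_new_tokens(2048, 1024), 10);
}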