Mirror of https://github.com/huggingface/text-generation-inference.git
fix: simplify changes
parent 4ff9cb806b
commit 0370b0feda
@@ -47,7 +47,6 @@ pub async fn run(
         watermark,
         grammar: String::new(),
         grammar_type: GrammarType::None as i32,
-        grammar_state: 0,
     };
 
     // Initialize terminal properties
@@ -80,8 +80,6 @@ message NextTokenChooserParameters {
     string grammar = 10;
     /// grammar type
     GrammarType grammar_type = 11;
-    /// grammar fsm state
-    uint32 grammar_state = 12;
 }
 
 message StoppingCriteriaParameters {
@@ -130,7 +130,6 @@ impl Client {
                     watermark: true,
                     grammar: String::new(),
                     grammar_type: GrammarType::None as i32,
-                    grammar_state: 0,
                 }),
                 stopping_parameters: Some(StoppingCriteriaParameters {
                     max_new_tokens: max_total_tokens - truncate,
@@ -48,7 +48,6 @@ impl Health {
                 watermark: false,
                 grammar: String::new(),
                 grammar_type: ProtoGrammarType::None as i32,
-                grammar_state: 0,
             }),
             stopping_parameters: Some(StoppingCriteriaParameters {
                 max_new_tokens: 1,
@@ -372,7 +372,6 @@ mod tests {
                 watermark: false,
                 grammar: String::new(),
                 grammar_type: ProtoGrammarType::None as i32,
-                grammar_state: 0,
             },
             stopping_parameters: StoppingCriteriaParameters {
                 ignore_eos_token: false,
@@ -356,7 +356,6 @@ impl Validation {
             watermark,
             grammar,
             grammar_type,
-            grammar_state: 0,
         };
         let stopping_parameters = StoppingCriteriaParameters {
             max_new_tokens,
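
Note: every Rust call site in the hunks above now builds the parameters the same way: grammar is String::new(), grammar_type is GrammarType::None as i32, and there is no grammar_state field at all. The following is only an illustrative sketch of that "no grammar" default shape, using a hypothetical Python Params stand-in rather than the generated NextTokenChooserParameters class, and assuming the integer value of GrammarType::None is 0:

from dataclasses import dataclass

GRAMMAR_TYPE_NONE = 0  # assumed integer value of GrammarType::None


@dataclass
class Params:
    # Hypothetical stand-in for NextTokenChooserParameters after this commit:
    # grammar and grammar_type remain, grammar_state is gone.
    watermark: bool = False
    grammar: str = ""                      # mirrors grammar: String::new()
    grammar_type: int = GRAMMAR_TYPE_NONE  # mirrors GrammarType::None as i32


# health.rs-style defaults (watermark: false) and client.rs-style (watermark: true)
print(Params(watermark=False))
print(Params(watermark=True))

The per-request grammar FSM state is no longer carried in these parameters; it is initialized on the Python server instead, as the next two hunks show.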
@@ -468,8 +468,6 @@ class HeterogeneousNextTokenChooser:
         tokenizer: PreTrainedTokenizerBase,
         fsm_grammar_states: Optional[List[int]] = None,
     ) -> "HeterogeneousNextTokenChooser":
-        if fsm_grammar_states is None:
-            fsm_grammar_states = [pb_.grammar_state for pb_ in pb]
         return HeterogeneousNextTokenChooser(
             watermark=[pb_.watermark for pb_ in pb],
             temperature=[pb_.temperature for pb_ in pb],
@@ -485,7 +483,9 @@ class HeterogeneousNextTokenChooser:
             tokenizer=tokenizer,
             grammars=[pb_.grammar for pb_ in pb],
             grammar_types=[pb_.grammar_type for pb_ in pb],
-            fsm_grammar_states=fsm_grammar_states,
+            fsm_grammar_states=(
+                fsm_grammar_states if fsm_grammar_states else [0] * len(pb)
+            ),
         )
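
In the two Python hunks, HeterogeneousNextTokenChooser.from_pb no longer reads a grammar_state off each protobuf parameter; when no explicit states are passed in, every request simply starts its grammar FSM at state 0. A minimal, self-contained sketch of that fallback logic, written as plain Python with a hypothetical resolve_fsm_grammar_states helper rather than the real class:

from typing import List, Optional


def resolve_fsm_grammar_states(
    pb: List[object], fsm_grammar_states: Optional[List[int]] = None
) -> List[int]:
    # After this commit there is no pb_.grammar_state lookup: a fresh batch
    # gets one zeroed FSM state per request; explicit states pass through.
    return fsm_grammar_states if fsm_grammar_states else [0] * len(pb)


# Fresh batch of three requests -> [0, 0, 0]; resumed states are kept as-is.
assert resolve_fsm_grammar_states([None, None, None]) == [0, 0, 0]
assert resolve_fsm_grammar_states([None, None], [5, 7]) == [5, 7]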