fix error if top_n_tokens is 0 or null

This commit is contained in:
gduhamel 2024-01-23 21:05:13 +01:00
parent fd8b42678d
commit 1f7042d165
4 changed files with 4 additions and 4 deletions

View File

@ -719,7 +719,7 @@ class CausalLM(Model):
                 [next_token_id_squeezed.item() in self.all_special_ids],
             ),
             generated_text,
-            [top_tokens],
+            [top_tokens] if top_tokens is not None else None,
         )
         generations.append(generation)

View File

@ -1089,7 +1089,7 @@ class FlashCausalLM(Model):
                 [nid in self.all_special_ids for nid in _next_token_ids],
             ),
             generated_text,
-            [top_tokens],
+            [top_tokens] if top_tokens is not None else None,
         )
         generations.append(generation)

View File

@ -809,7 +809,7 @@ class IdeficsCausalLM(Model):
                 [next_token_id_squeezed.item() in self.all_special_ids],
             ),
             generated_text,
-            [top_tokens],
+            [top_tokens] if top_tokens is not None else None,
         )
         generations.append(generation)

View File

@ -773,7 +773,7 @@ class Seq2SeqLM(Model):
                 [next_token_id_squeezed.item() in self.all_special_ids],
             ),
             generated_text,
-            [top_tokens],
+            [top_tokens] if top_tokens is not None else None,
         )
         generations.append(generation)