Baichuan2-13B does not define `max_position_embeddings` in its config; fall back to the tokenizer's `model_max_length` when the attribute is absent.

see https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat/blob/main/config.json

Signed-off-by: Wang, Yi A <yi.a.wang@intel.com>
This commit is contained in:
Wang, Yi A 2025-01-12 22:47:23 -08:00
parent 83624a07be
commit 5ad8c9a40b

View File

@ -1595,7 +1595,11 @@ class FlashCausalLM(Model):
if max_total_tokens is None:
if get_support_chunking():
model_max_length = self.tokenizer.model_max_length
max_position_embeddings = self.config.max_position_embeddings
max_position_embeddings = (
self.config.max_position_embeddings
if hasattr(self.config, "max_position_embeddings")
else model_max_length
)
max_total_tokens = min(
num_blocks * BLOCK_SIZE, model_max_length, max_position_embeddings
)