Fix to non-LLAMA models (#177)

Co-authored-by: Jacek Czaja <jczaja@habana.ai>
This commit is contained in:
Jacek Czaja 2024-07-04 13:42:24 +02:00 committed by GitHub
parent 6168aa4100
commit 5df20f88ff
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -850,9 +850,12 @@ class CausalLM(Model):
                 "attention_mask": attention_mask,
                 "past_key_values": past_key_values,
                 "token_idx": token_idx,
-                "lazy_mode": LAZY_MODE == 1,
             }
+            # Optimum Habana got "lazy_mode" key-val only supported for llama type of models
+            if self.model.config.model_type == "llama" :
+                kwargs["lazy_mode"] = LAZY_MODE == 1
             if self.has_position_ids:
                 kwargs["position_ids"] = position_ids