From a50b33a964e267833d985075d183c5aa423b0a60 Mon Sep 17 00:00:00 2001
From: yuanwu
Date: Fri, 13 Jun 2025 05:43:16 +0000
Subject: [PATCH] Fix mistral error

Signed-off-by: yuanwu
---
 .../models/custom_modeling/flash_mistral_modeling.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py b/backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py
index a4ad8f59..4fad7c65 100644
--- a/backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py
+++ b/backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py
@@ -111,7 +111,7 @@ class MistralAttention(torch.nn.Module):
         )
         self.num_heads = config.num_attention_heads
         self.hidden_size = config.hidden_size
-        if hasattr(config, "head_dim"):
+        if hasattr(config, "head_dim") and config.head_dim is not None:
             self.head_size = config.head_dim
         else:
             self.head_size = self.hidden_size // self.num_heads