From c4b78bd21447eda1bfae791ef58ca81fba7496cc Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Mon, 22 Jul 2024 13:54:17 +0000
Subject: [PATCH] No access to transformers config, only config_dict here.

---
 server/text_generation_server/models/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/text_generation_server/models/__init__.py b/server/text_generation_server/models/__init__.py
index 4bcb657d..4dd53f53 100644
--- a/server/text_generation_server/models/__init__.py
+++ b/server/text_generation_server/models/__init__.py
@@ -758,7 +758,7 @@ def get_model(
                 trust_remote_code=trust_remote_code,
                 lora_adapter_ids=lora_adapter_ids,
                 # hidden_size / num_attention_heads is wrong in `google/gemma-2-9b-it`
-                head_size=config.head_dim,
+                head_size=config_dict["head_dim"],
             )
         elif sharded:
             raise NotImplementedError(FLASH_ATT_ERROR_MESSAGE.format("Sharded Gemma2"))
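
For context, the one-line change swaps `config.head_dim` (attribute access on a parsed `transformers` config object) for `config_dict["head_dim"]` (a key lookup on the raw `config.json` contents), because only the dict is in scope at this call site. Below is a minimal sketch of that distinction; it is not TGI's actual loading code, and the Gemma 2 config values are illustrative stand-ins, not copied from the real checkpoint:

```python
import json

# Stand-in for the contents of a Gemma 2 checkpoint's config.json.
# Values are illustrative; note that hidden_size / num_attention_heads
# (3584 / 16 = 224) differs from head_dim (256), which is why the code
# reads head_dim explicitly rather than deriving it.
config_json = (
    '{"model_type": "gemma2", "head_dim": 256, '
    '"hidden_size": 3584, "num_attention_heads": 16}'
)
config_dict = json.loads(config_json)

# A transformers config object exposes fields as attributes
# (config.head_dim), but a plain dict does not:
try:
    head_size = config_dict.head_dim
except AttributeError as e:
    print(f"attribute access fails: {e}")

# The patched code indexes the dict directly instead.
head_size = config_dict["head_dim"]
print(f"head_size = {head_size}")
```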