From 861acdeab1bbf4142ede13f2180b30d0feccb77e Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Thu, 23 Nov 2023 13:57:02 +0000
Subject: [PATCH] Revert "Idefics force bfloat16"

This reverts commit b8952b2b32c9c77fb14304a31e974728d7fcad20.
---
 server/text_generation_server/models/idefics.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/server/text_generation_server/models/idefics.py b/server/text_generation_server/models/idefics.py
index 8cd7934b..fa23d1f9 100644
--- a/server/text_generation_server/models/idefics.py
+++ b/server/text_generation_server/models/idefics.py
@@ -39,8 +39,7 @@ class IDEFICSSharded(IdeficsCausalLM):
             device = torch.device(f"cuda:{rank}")
             # 9b seems to work correctly enough in float16, but 80b seems
             # to be really saturating for f16.
-            if dtype is None or dtype == torch.float16:
-                dtype = torch.bfloat16
+            dtype = torch.bfloat16 if dtype is None else dtype
         else:
             device = torch.device("cpu")
             dtype = torch.float32 if dtype is None else dtype
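
Note: the following is a minimal standalone sketch of the dtype-selection behavior this revert restores, not code from the repository. The helper name `resolve_dtype` and its parameters are hypothetical; `rank` and the surrounding IdeficsCausalLM context are assumed from idefics.py.

    import torch

    def resolve_dtype(dtype, cuda_available, rank=0):
        if cuda_available:
            device = torch.device(f"cuda:{rank}")
            # After the revert: bfloat16 is only the default when no dtype
            # is given; an explicit float16 request is honored again.
            dtype = torch.bfloat16 if dtype is None else dtype
        else:
            device = torch.device("cpu")
            dtype = torch.float32 if dtype is None else dtype
        return device, dtype

    # Before the revert (commit b8952b2b), the CUDA branch instead forced
    # bfloat16 even when float16 was explicitly requested:
    #     if dtype is None or dtype == torch.float16:
    #         dtype = torch.bfloat16

The reverted commit had coerced float16 to bfloat16 because, per the inline comment, the 80b model saturates in f16 while 9b works well enough; this revert returns control of the dtype to the caller.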