From b8952b2b32c9c77fb14304a31e974728d7fcad20 Mon Sep 17 00:00:00 2001
From: Vince Jankovics
Date: Tue, 31 Oct 2023 12:51:13 +0000
Subject: [PATCH] Idefics force bfloat16

---
 server/text_generation_server/models/idefics.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/server/text_generation_server/models/idefics.py b/server/text_generation_server/models/idefics.py
index fa23d1f9..8cd7934b 100644
--- a/server/text_generation_server/models/idefics.py
+++ b/server/text_generation_server/models/idefics.py
@@ -39,7 +39,8 @@ class IDEFICSSharded(IdeficsCausalLM):
             device = torch.device(f"cuda:{rank}")
             # 9b seems to work correctly enough in float16, but 80b seems
             # to be really saturating for f16.
-            dtype = torch.bfloat16 if dtype is None else dtype
+            if dtype is None or dtype == torch.float16:
+                dtype = torch.bfloat16
         else:
             device = torch.device("cpu")
             dtype = torch.float32 if dtype is None else dtype
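
For reference, a minimal standalone sketch of the dtype selection this patch produces. The select_dtype helper and its signature are illustrative only (not part of idefics.py); the branch logic mirrors the patched IDEFICSSharded.__init__:

    import torch

    def select_dtype(dtype, cuda_available):
        # Hypothetical helper mirroring the patched dtype logic.
        if cuda_available:
            # Force bfloat16 when no dtype is given or float16 is requested;
            # per the in-file comment, the 80b checkpoint saturates in f16.
            if dtype is None or dtype == torch.float16:
                dtype = torch.bfloat16
        else:
            # CPU path is unchanged by the patch: default to float32.
            dtype = torch.float32 if dtype is None else dtype
        return dtype

    assert select_dtype(None, cuda_available=True) is torch.bfloat16
    assert select_dtype(torch.float16, cuda_available=True) is torch.bfloat16
    assert select_dtype(torch.float32, cuda_available=True) is torch.float32
    assert select_dtype(None, cuda_available=False) is torch.float32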