From a8da815b81c481bf9ab4d8f8c9c889690a52cbc9 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Thu, 23 Nov 2023 13:57:28 +0000
Subject: [PATCH] Fixing Idefics dtype.

---
 server/text_generation_server/models/idefics_causal_lm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/text_generation_server/models/idefics_causal_lm.py b/server/text_generation_server/models/idefics_causal_lm.py
index 2472caf6..dcad1fa9 100644
--- a/server/text_generation_server/models/idefics_causal_lm.py
+++ b/server/text_generation_server/models/idefics_causal_lm.py
@@ -583,7 +583,7 @@ class IdeficsCausalLM(Model):
 
         if torch.cuda.is_available():
             device = torch.device("cuda")
-            dtype = torch.float16 if dtype is None else dtype
+            dtype = torch.bfloat16 if dtype is None else dtype
         else:
            if quantize:
                raise ValueError("quantization is not available on CPU")
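
Note on the change: after this patch, IdeficsCausalLM defaults to torch.bfloat16 on CUDA devices when the caller passes no explicit dtype; an explicitly requested dtype is still honored. The sketch below illustrates the resulting selection logic only; the resolve_dtype helper and the float32 CPU default are assumptions for illustration, not code from the repository.

    import torch
    from typing import Optional, Tuple

    def resolve_dtype(
        dtype: Optional[torch.dtype] = None,
        quantize: Optional[str] = None,
    ) -> Tuple[torch.device, torch.dtype]:
        # Hypothetical helper mirroring the hunk above: pick the device,
        # then fall back to a default dtype only when none was requested.
        if torch.cuda.is_available():
            device = torch.device("cuda")
            # After this patch the GPU default is bfloat16 instead of float16.
            dtype = torch.bfloat16 if dtype is None else dtype
        else:
            if quantize:
                raise ValueError("quantization is not available on CPU")
            device = torch.device("cpu")
            # CPU default dtype assumed to be float32 for this sketch.
            dtype = torch.float32 if dtype is None else dtype
        return device, dtype

    # Example: no explicit dtype on a CUDA machine -> (cuda, torch.bfloat16)
    device, dtype = resolve_dtype()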