Fixing Idefics dtype.

This commit is contained in:
Nicolas Patry 2023-11-23 13:57:28 +00:00
parent 861acdeab1
commit a8da815b81

View File

@@ -583,7 +583,7 @@ class IdeficsCausalLM(Model):
if torch.cuda.is_available():
device = torch.device("cuda")
-            dtype = torch.float16 if dtype is None else dtype
+            dtype = torch.bfloat16 if dtype is None else dtype
else:
if quantize:
raise ValueError("quantization is not available on CPU")