mirror of https://github.com/huggingface/text-generation-inference.git
synced 2025-09-11 20:34:54 +00:00
fix: apply paligemma template conditionally
parent c119ac4d1d
commit d6e306c2b3
@@ -173,7 +173,10 @@ class VlmCausalLMBatch(FlashMistralBatch):
             image_id = 0
             for chunk in chunks:
                 if chunk["type"] == "text":
-                    full_text += "<bos>" + chunk["content"] + "\n"
+                    if config.model_type == "paligemma":
+                        full_text += "<bos>" + chunk["content"] + "\n"
+                    else:
+                        full_text += chunk["content"]
                 elif chunk["type"] == "image":
                     image = chunk["content"]
                     # Should never receive URLs anymore, processing should be done
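For context, a minimal standalone sketch of the chunk-assembly behavior this diff introduces. The names build_full_text, image_token, and the SimpleNamespace config are illustrative stand-ins rather than names from the repository, and the image handling inside VlmCausalLMBatch is more involved than shown; only the paligemma-conditional text branch mirrors the change above.

# Illustrative sketch, not the repository's implementation.
from types import SimpleNamespace

def build_full_text(chunks, config, image_token="<image>"):
    # Assemble the prompt from input chunks, applying the <bos> + newline
    # template only when the model type is paligemma (as in this commit).
    full_text = ""
    for chunk in chunks:
        if chunk["type"] == "text":
            if config.model_type == "paligemma":
                full_text += "<bos>" + chunk["content"] + "\n"
            else:
                full_text += chunk["content"]
        elif chunk["type"] == "image":
            # Placeholder for image handling; the real code processes the
            # decoded image rather than inserting a literal token.
            full_text += image_token
    return full_text

# The same chunks yield different prompts depending on model_type.
chunks = [
    {"type": "text", "content": "What is in this image?"},
    {"type": "image", "content": "<decoded image>"},
]
print(build_full_text(chunks, SimpleNamespace(model_type="paligemma")))
print(build_full_text(chunks, SimpleNamespace(model_type="llava_next")))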