From 1bcaf8f5caccfeea383d3bb2b2e157c5532cbec6 Mon Sep 17 00:00:00 2001 From: Nicolas Patry Date: Wed, 15 May 2024 10:21:16 +0000 Subject: [PATCH] Remove paligemma-specific newline handling for text chunks in vlm_causal_lm. --- server/text_generation_server/models/vlm_causal_lm.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/server/text_generation_server/models/vlm_causal_lm.py b/server/text_generation_server/models/vlm_causal_lm.py index 37664230..f0db89b2 100644 --- a/server/text_generation_server/models/vlm_causal_lm.py +++ b/server/text_generation_server/models/vlm_causal_lm.py @@ -173,10 +173,7 @@ class VlmCausalLMBatch(FlashMistralBatch): image_id = 0 for chunk in chunks: if chunk["type"] == "text": - if config.model_type == "paligemma": - full_text += "" + chunk["content"] + "\n" - else: - full_text += chunk["content"] + full_text += chunk["content"] elif chunk["type"] == "image": image = chunk["content"] # Should never receive URLs anymore, processing should be done