Use the latest transformers

Signed-off-by: yuanwu <yuan.wu@intel.com>
This commit is contained in:
yuanwu 2025-05-22 05:45:45 +03:00
parent 2e8d3e91ea
commit 346b6f7219
2 changed files with 3 additions and 2 deletions

View File

@@ -49,7 +49,8 @@ from text_generation_server.models.custom_modeling.flash_qwen2_modeling import (
# Copied from: https://github.com/huggingface/transformers/blob/main/src/transformers/models/qwen2_5_vl/processing_qwen2_5_vl.py
from typing import Union
from transformers.feature_extraction_utils import BatchFeature
from transformers.image_utils import ImageInput, VideoInput
from transformers.image_utils import ImageInput
from transformers.video_utils import VideoInput
from transformers.processing_utils import (
ProcessingKwargs,
ProcessorMixin,

View File

@@ -12,7 +12,7 @@ if [[ "$ATTENTION" == "paged" ]]; then
# Check if Llama-4 is in the command line arguments
if [[ "$*" == *"Llama-4"* || "$*" == *"Qwen3"* ]]; then
echo 'ATTENTION=paged and Llama-4 or Qwen3 detected'
pip install git+https://github.com/huggingface/transformers.git@29338949
pip install transformers==4.52.1
fi
fi