Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-09-09 19:34:53 +00:00)
Add missing dependency to the target install-flash-attention
parent e71471bec9
commit b00907d36f
@@ -17,7 +17,7 @@ install-torch:
 	# Install specific version of torch
 	pip install torch --extra-index-url https://download.pytorch.org/whl/cu118 --no-cache-dir
 
-install: gen-server install-torch install-transformers
+install: gen-server install-torch install-transformers install-flash-attention
 	pip install pip --upgrade
 	pip install -r requirements.txt
 	pip install -e ".[bnb, accelerate]"
@@ -26,4 +26,4 @@ run-dev:
 	SAFETENSORS_FAST_GPU=1 python -m torch.distributed.run --nproc_per_node=2 text_generation_server/cli.py serve bigscience/bloom-560m --sharded
 
 export-requirements:
-	poetry export -o requirements.txt -E bnb --without-hashes
+	poetry export -o requirements.txt -E bnb --without-hashes
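For context, a minimal, self-contained sketch of how Make orders the new prerequisite. This is an illustration only, not the repository's actual Makefile: every recipe body below is a placeholder echo standing in for the real commands.

# Sketch (assumption): placeholder recipes showing prerequisite ordering.
gen-server install-torch install-transformers install-flash-attention:
	@echo "placeholder recipe for $@"

install: gen-server install-torch install-transformers install-flash-attention
	@echo "install recipe runs only after every prerequisite above has been built"

With this change, "make install" builds each listed prerequisite (left to right in a serial build) before running the install recipe itself; previously install-flash-attention was not in that list, so a plain "make install" skipped it.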