Add missing install-flash-attention dependency to the install target

Yang, Bo authored on 2023-05-16 09:57:48 -07:00 (committed by GitHub)
parent e71471bec9
commit b00907d36f


@@ -17,7 +17,7 @@ install-torch:
 	# Install specific version of torch
 	pip install torch --extra-index-url https://download.pytorch.org/whl/cu118 --no-cache-dir
 
-install: gen-server install-torch install-transformers
+install: gen-server install-torch install-transformers install-flash-attention
 	pip install pip --upgrade
 	pip install -r requirements.txt
 	pip install -e ".[bnb, accelerate]"
@@ -26,4 +26,4 @@ run-dev:
 	SAFETENSORS_FAST_GPU=1 python -m torch.distributed.run --nproc_per_node=2 text_generation_server/cli.py serve bigscience/bloom-560m --sharded
 
 export-requirements:
-	poetry export -o requirements.txt -E bnb --without-hashes
\ No newline at end of file
+	poetry export -o requirements.txt -E bnb --without-hashes
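
For readers unfamiliar with Make prerequisite lists: adding install-flash-attention to the prerequisites of install means `make install` now runs the install-flash-attention recipe before its own commands, so flash attention no longer has to be installed with a separate `make install-flash-attention` invocation. The sketch below is illustrative only; the real recipe bodies for gen-server, install-torch, install-transformers, and install-flash-attention live elsewhere in the Makefile and are not part of this diff, so they are shown here as placeholder echo commands.

.PHONY: install gen-server install-torch install-transformers install-flash-attention

gen-server:
	@echo "stub: generate gRPC server code"

install-torch:
	@echo "stub: install pinned torch build"

install-transformers:
	@echo "stub: install transformers"

# Placeholder for the real target; not shown in this diff.
install-flash-attention:
	@echo "stub: build and install flash attention"

# With install-flash-attention in the prerequisite list, Make runs all
# four prerequisite recipes before the install recipe itself.
install: gen-server install-torch install-transformers install-flash-attention
	@echo "stub: pip install -r requirements.txt, etc."

Running `make install` on this stub prints the four prerequisite lines first, which is exactly the ordering guarantee the commit adds: before this change, a fresh `make install` could complete without flash attention ever being built.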