Add missing dependency to the target install-flash-attention

This commit is contained in:
Yang, Bo 2023-05-16 09:57:48 -07:00 committed by GitHub
parent e71471bec9
commit b00907d36f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -17,7 +17,7 @@ install-torch:
 	# Install specific version of torch
 	pip install torch --extra-index-url https://download.pytorch.org/whl/cu118 --no-cache-dir
-install: gen-server install-torch install-transformers
+install: gen-server install-torch install-transformers install-flash-attention
 	pip install pip --upgrade
 	pip install -r requirements.txt
 	pip install -e ".[bnb, accelerate]"