Actually stay on flash v1.

This commit is contained in:
Nicolas Patry 2025-02-12 09:09:11 +01:00
parent bcf98a8b81
commit 3217134791
No known key found for this signature in database
GPG Key ID: 4242CEF24CB6DBF9

View File

@@ -1,9 +1,10 @@
# Pin flash-attention to a specific upstream commit (flash-attention v1 series),
# so builds are reproducible regardless of upstream movement.
flash_att_commit := 6d48e14a6c2f551db96f0badc658a6279a929df3

# Command target, not a file: always run when requested.
.PHONY: build-flash-attention

# Clone flash-attention (if not already present) and build it, together with
# its layer_norm and rotary CUDA extensions, at the pinned commit.
build-flash-attention:
	# First-time setup: build tooling + clone (Dao-AILab is the current upstream home).
	if [ ! -d 'flash-attention' ]; then \
		pip install -U packaging ninja --no-cache-dir && \
		git clone https://github.com/Dao-AILab/flash-attention.git; \
	fi
	# `git fetch` first so the pinned commit is available even in a stale clone.
	# MAX_JOBS=8 caps parallel nvcc jobs to keep memory use bounded.
	cd flash-attention && git fetch && git checkout $(flash_att_commit) && \
	MAX_JOBS=8 python setup.py build && cd csrc/layer_norm && python setup.py build && cd ../rotary && python setup.py build