fix install

This commit is contained in:
OlivierDehaene 2023-04-11 11:46:34 +02:00
parent b5fec41033
commit a265dde4e0
2 changed files with 4 additions and 4 deletions

View File

@@ -5,12 +5,12 @@ flash-attention:
pip install packaging
git clone https://github.com/HazyResearch/flash-attention.git
build-flash-attention:
build-flash-attention: flash-attention
cd flash-attention && git fetch && git checkout $(flash_att_commit)
cd flash-attention && python setup.py build
cd flash-attention/csrc/rotary && python setup.py build
cd flash-attention/csrc/layer_norm && python setup.py build
install-flash-attention: flash-attention build-flash-attention
install-flash-attention: build-flash-attention
pip uninstall flash_attn rotary_emb dropout_layer_norm -y || true
cd flash-attention && python setup.py install && cd csrc/layer_norm && python setup.py install && cd ../rotary && python setup.py install

View File

@@ -5,9 +5,9 @@ transformers:
pip install --upgrade setuptools
git clone https://github.com/OlivierDehaene/transformers.git
build-transformers:
build-transformers: transformers
cd transformers && git fetch && git checkout $(transformers_commit) && python setup.py build
install-transformers: transformers build-transformers
install-transformers: build-transformers
pip uninstall transformers -y || true
cd transformers && python setup.py install