2023-03-03 17:42:20 +00:00
|
|
|
# Pinned commit of the OlivierDehaene/transformers fork (carries the custom
# CUDA kernels installed by `install-transformers` below).
transformers_commit := 2f87dca1ca3e5663d0637da9bb037a6956e57a5e
|
2023-03-03 16:56:27 +00:00
|
|
|
|
2022-10-08 10:30:12 +00:00
|
|
|
# Generate the Python gRPC stubs from ../proto/generate.proto into
# text_generation/pb/. Command target, not a file — mark it phony so a stray
# file named `gen-server` can never shadow it.
.PHONY: gen-server
gen-server:
	# Compile protos
	pip install grpcio-tools==1.51.1 --no-cache-dir
	# -p: create if missing, succeed if present — unlike `mkdir … || true`,
	# this does not swallow real failures (e.g. permission denied).
	mkdir -p text_generation/pb
	python -m grpc_tools.protoc -I../proto --python_out=text_generation/pb --grpc_python_out=text_generation/pb ../proto/generate.proto
	# Rewrite the generated absolute `import *_pb2` lines into relative
	# imports so the stubs are importable as a package.
	find text_generation/pb/ -type f -name "*.py" -print0 -exec sed -i -e 's/^\(import.*pb2\)/from . \1/g' {} \;
	touch text_generation/pb/__init__.py
|
2022-10-08 10:30:12 +00:00
|
|
|
|
2022-10-18 13:19:03 +00:00
|
|
|
# Replace any installed transformers with the pinned fork at
# $(transformers_commit), built from a GitHub source archive.
.PHONY: install-transformers
install-transformers:
	# Install specific version of transformers with custom cuda kernels
	# `|| true` needed here: pip uninstall fails when the package is absent.
	pip uninstall transformers -y || true
	# rm -rf exits 0 on a missing path, so no `|| true` is needed.
	rm -rf transformers
	rm -rf transformers-$(transformers_commit)
	# -f (--fail): error out on HTTP failures instead of saving an HTML
	# error page that would make the later `unzip` fail confusingly.
	curl -f -L -O https://github.com/OlivierDehaene/transformers/archive/$(transformers_commit).zip
	unzip $(transformers_commit).zip
	rm $(transformers_commit).zip
	mv transformers-$(transformers_commit) transformers
	# && keeps the install in the extracted dir; each recipe line gets its
	# own shell, so a bare `cd` on its own line would have no effect.
	cd transformers && python setup.py install
|
2022-10-08 10:30:12 +00:00
|
|
|
|
2022-10-18 13:19:03 +00:00
|
|
|
# Install torch from the CUDA 11.8 (cu118) wheel index. Command target —
# declare phony so it always runs.
.PHONY: install-torch
install-torch:
	# Install specific version of torch
	pip install torch --extra-index-url https://download.pytorch.org/whl/cu118 --no-cache-dir
|
2022-10-08 10:30:12 +00:00
|
|
|
|
2022-11-07 11:53:56 +00:00
|
|
|
# Full setup: stubs, torch, patched transformers, then this package itself
# (editable install). Phony — `install` is a command, not a file.
# NOTE(review): under `make -j` the three prerequisites run concurrently and
# all invoke pip; run serially (default) if that races.
.PHONY: install
install: gen-server install-torch install-transformers
	pip install pip --upgrade
	pip install -e . --no-cache-dir
|
2022-10-08 10:30:12 +00:00
|
|
|
|
2022-10-22 18:00:15 +00:00
|
|
|
# Launch a local 2-shard dev server for bigscience/bloom-560m via
# torch.distributed.run. Phony command target.
.PHONY: run-dev
run-dev:
	SAFETENSORS_FAST_GPU=1 python -m torch.distributed.run --nproc_per_node=2 text_generation/cli.py serve bigscience/bloom-560m --sharded
|