# Makefile targets for building and installing vLLM (CUDA and ROCm variants).
# Fetch vLLM sources for CUDA: install build prerequisites, then clone the
# pinned fork used for the CUDA build.
vllm-cuda:
	# Clone vllm
	pip install -U ninja packaging --no-cache-dir
	git clone https://github.com/OlivierDehaene/vllm.git vllm
|
2023-12-11 11:46:30 +00:00
|
|
|
|
|
|
|
# Build vLLM for CUDA against a pinned commit (reproducible builds).
# Depends on vllm-cuda having cloned the repository into ./vllm.
build-vllm-cuda: vllm-cuda
	cd vllm && git fetch && git checkout 4bec8cee87f6bb8cebaec297029713cd2082e0b2
	cd vllm && python setup.py build
|
2023-11-27 13:08:12 +00:00
|
|
|
|
2023-12-11 11:46:30 +00:00
|
|
|
# Install the CUDA build of vLLM, removing any previously installed copy
# first (`|| true` keeps the rule idempotent when vllm is not installed).
install-vllm-cuda: build-vllm-cuda
	pip uninstall vllm -y || true
	cd vllm && python setup.py install
|
2023-06-30 17:09:59 +00:00
|
|
|
|
2023-12-11 11:46:30 +00:00
|
|
|
# Fetch vLLM sources for ROCm: install build prerequisites, then clone the
# pinned ROCm fork.
vllm-rocm:
	# Clone vllm
	pip install -U ninja packaging --no-cache-dir
	git clone https://github.com/fxmarty/rocm-vllm.git vllm
|
2023-06-30 17:09:59 +00:00
|
|
|
|
2023-12-11 11:46:30 +00:00
|
|
|
# Build (and install) vLLM for ROCm against a pinned commit.
# Applies a bf16 header patch to the ROCm toolchain before building, and
# restricts the build to the listed GPU architectures.
# NOTE(review): unlike the CUDA flow, this rule runs `setup.py install`
# directly rather than `build` — install-vllm-rocm then reinstalls; preserved
# as-is to avoid changing build behavior.
build-vllm-rocm: vllm-rocm
	cd vllm && git fetch && git checkout ca6913b3c2ffacdcb7d15e914dc34adbc6c89479
	cd vllm && patch /opt/rocm/include/hip/amd_detail/amd_hip_bf16.h ./rocm_patch/rocm_bf16.patch
	cd vllm && PYTORCH_ROCM_ARCH="gfx90a;gfx942" python setup.py install
|
2023-06-30 17:09:59 +00:00
|
|
|
|
2023-12-11 11:46:30 +00:00
|
|
|
# Install the ROCm build of vLLM, removing any previously installed copy
# first (`|| true` keeps the rule idempotent when vllm is not installed).
install-vllm-rocm: build-vllm-rocm
	pip uninstall vllm -y || true
	cd vllm && python setup.py install
|