Skip to content

Commit b66d76e

Browse files
committed
Use the correct requirements file paths from vLLM: the files moved from requirements-*.txt to a requirements/ directory (see vllm-project/vllm#12547).
1 parent ab228ec commit b66d76e

File tree

1 file changed: +3 −3 lines changed

vllm-benchmarks/run.sh

+3 −3
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,11 @@ build_vllm() {
4141
SCCACHE_CACHE_SIZE=100G sccache --start-server || true
4242
# Build and install vLLM
4343
if command -v nvidia-smi; then
44-
pip install -r requirements-build.txt
44+
pip install -r requirements/build.txt
4545
pip install --editable .
4646
elif command -v amd-smi; then
47-
pip install -r requirements-rocm.txt
48-
pip install -r requirements-rocm-build.txt
47+
pip install -r requirements/rocm.txt
48+
pip install -r requirements/rocm-build.txt
4949
# https://docs.vllm.ai/en/latest/getting_started/installation/gpu/index.html?device=rocm
5050
PYTORCH_ROCM_ARCH="gfx90a;gfx942" python setup.py develop
5151
fi

0 commit comments

Comments
 (0)