# --- Stack 1: torch 2.6.0 (cu124) + vLLM 0.8.4 ---
# pip install torch==2.6.0 torchvision==0.21.0 torchaudio==2.6.0 --index-url https://download.pytorch.org/whl/cu124
# pip install flashinfer-python==0.2.2.post1 -i https://flashinfer.ai/whl/cu124/torch2.6
# pip install vllm==0.8.4 qwen_vl_utils keye_vl_utils opencv-python-headless==4.11.0.86 numpy==1.26.4 video-reader-rs
# sudo pip install flash-attn==2.7.4.post1 --no-build-isolation
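#
# (optional) sanity-check the base stack before building FA3; these attributes are
# standard torch/flash-attn APIs:
# python -c "import torch, flash_attn; print(torch.__version__, torch.version.cuda, torch.cuda.is_available(), flash_attn.__version__)"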
#
# Build FlashAttention-3 from the hopper/ tree of a local flash-attention checkout:
# cp -r /mnt/bn/yufan-dev-my/ysh/Codes/dummy_dataloader/decord_temp/flash-attention /opt/tiger
# cd /opt/tiger/flash-attention/hopper
# pip install ninja==1.11.1.3
# sudo python setup.py install
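#
# (optional) verify the Hopper (FA3) build imports; the flash_attn_interface module
# name is assumed from the upstream flash-attention/hopper README and may differ
# across revisions:
# python -c "import flash_attn_interface; print('FA3 import OK')"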
#
# --- Stack 2: torch 2.8.0 (cu126) + vLLM built from source ---
# pip install torch==2.8.0 torchvision==0.23.0 torchaudio==2.8.0 --index-url https://download.pytorch.org/whl/cu126
# NOTE: the flashinfer index below still points at the cu124/torch2.6 wheels; swap in a matching index if one exists for this stack.
# pip install flashinfer-python==0.2.2.post1 -i https://flashinfer.ai/whl/cu124/torch2.6
# pip install qwen_vl_utils keye_vl_utils opencv-python-headless==4.11.0.86 numpy==1.26.4 video-reader-rs
# pip install flash-attn==2.8.3 --no-build-isolation
# pip install git+https://github.com/vllm-project/vllm.git
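#
# (optional) confirm the source-built vLLM imports cleanly:
# python -c "import vllm; print(vllm.__version__)"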