We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent ba96c89 commit 002383f (Copy full SHA for 002383f)
1 file changed
README.md
@@ -60,7 +60,8 @@ conda create -n modalities python=3.13
60
conda activate modalities
61
62
# Install PyTorch, psutil, Ninja and Flash Attention
63
-pip install "torch>=2.10,<2.11.0" # Or appropriate version for your CUDA setup.
+# For PyTorch, select the correct index URL for your CUDA/CPU setup from https://pytorch.org/get-started/locally/, e.g.:
64
+pip install "torch>=2.10,<2.11.0" torchvision --index-url https://download.pytorch.org/whl/cu130
65
pip install psutil ninja # Ninja lowers compilation time of flash attention significantly
66
pip install flash-attn==2.8.3 --no-build-isolation
67
```
0 commit comments