Update flashattn.sh
Browse files — flashattn.sh (+1 −1)
flashattn.sh
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
-
MAX_JOBS=
|
| 2 |
#git clone https://github.com/Dao-AILab/flash-attention.git
|
| 3 |
#cd flash-attention/hopper && MAX_JOBS=4 python setup.py install
|
|
|
|
| 1 |
+
MAX_JOBS=2 pip install flash-attn --use-pep517 --no-build-isolation
|
| 2 |
#git clone https://github.com/Dao-AILab/flash-attention.git
|
| 3 |
#cd flash-attention/hopper && MAX_JOBS=4 python setup.py install
|