@@ -87,9 +87,10 @@ cu=$(nvcc --version | grep "Cuda compilation tools" | awk '{print $5}' | cut -d
 torch=$(pip show torch | grep Version | awk '{print $2}' | cut -d '+' -f 1 | cut -d '.' -f 1,2)
 cp=$(python3 --version | awk '{print $2}' | awk -F. '{print $1$2}')
 cxx=$(g++ --version | grep 'g++' | awk '{print $3}' | cut -d '.' -f 1)
-wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
-pip install --no-cache-dir flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
-rm flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+flash_attn_version="2.8.0.post2"
+wget https://github.com/Dao-AILab/flash-attention/releases/download/v${flash_attn_version}/flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+pip install --no-cache-dir flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+rm flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
 
 # From Megatron-LM log
 pip install "git+https://github.com/Dao-AILab/flash-attention.git@v2.7.2#egg=flashattn-hopper&subdirectory=hopper"
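
For readers applying this outside the diff: the change parameterizes the flash-attention version so the release tag and the wheel filename stay in sync. Below is a minimal standalone sketch of how the detected versions compose into a wheel name, with an added existence probe on the release URL before downloading. The example version values and the curl probe are illustrative assumptions, not part of the original change:

#!/usr/bin/env bash
# Hypothetical example values; on a real system they come from the
# detection commands above (nvcc, pip show torch, python3, g++).
flash_attn_version="2.8.0.post2"
cu=12      # CUDA major version, e.g. from nvcc 12.x
torch=2.7  # torch major.minor
cp=310     # CPython tag, e.g. Python 3.10 -> 310
cxx=11     # g++ major version
wheel="flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl"
url="https://github.com/Dao-AILab/flash-attention/releases/download/v${flash_attn_version}/${wheel}"
# --head avoids downloading the body; --fail returns non-zero on HTTP errors;
# --location follows the redirect GitHub uses for release assets.
if curl --location --fail --silent --head "$url" > /dev/null; then
    echo "Prebuilt wheel available: $wheel"
else
    echo "No prebuilt wheel for this combination; a source build may be needed." >&2
fi

With the example values above, the composed name is flash_attn-2.8.0.post2+cu12torch2.7cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; the probe succeeds only when a matching release asset actually exists. After installing, a quick sanity check is python3 -c "import flash_attn; print(flash_attn.__version__)", which should report the pinned version.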