Sayoyo committed on
Commit
6ed2902
·
1 Parent(s): ccf578a

feat: change flash-attn

Browse files
Files changed (1) hide show
  1. requirements.txt +1 -1
requirements.txt CHANGED
@@ -32,7 +32,7 @@ lightning>=2.0.0
32
  triton-windows>=3.0.0,<3.4; sys_platform == 'win32'
33
  triton>=3.0.0; sys_platform != 'win32'
34
  flash-attn @ https://github.com/sdbds/flash-attention-for-windows/releases/download/2.8.2/flash_attn-2.8.2+cu128torch2.7.1cxx11abiFALSEfullbackward-cp311-cp311-win_amd64.whl ; sys_platform == 'win32' and python_version == '3.11' and platform_machine == 'AMD64'
35
- flash-attn; sys_platform != 'win32'
36
  xxhash
37
 
38
  # HuggingFace Space required
 
32
  triton-windows>=3.0.0,<3.4; sys_platform == 'win32'
33
  triton>=3.0.0; sys_platform != 'win32'
34
  flash-attn @ https://github.com/sdbds/flash-attention-for-windows/releases/download/2.8.2/flash_attn-2.8.2+cu128torch2.7.1cxx11abiFALSEfullbackward-cp311-cp311-win_amd64.whl ; sys_platform == 'win32' and python_version == '3.11' and platform_machine == 'AMD64'
35
+ flash-attn @ https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.12/flash_attn-2.8.3+cu128torch2.10-cp311-cp311-linux_x86_64.whl ; sys_platform == 'linux' and python_version == '3.11'
36
  xxhash
37
 
38
  # HuggingFace Space required