Fix flash-attn

Henk
2023-09-28 09:51:09 +02:00
parent e238a1c9f6
commit fa8d9e65ff


@@ -63,4 +63,4 @@ dependencies:
 - windows-curses; sys_platform == 'win32'
 - pynvml
 - xformers==0.0.21
-- https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.0/flash_attn-2.3.0+cu118torch2.0cxx11abiTRUE-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux'
+- https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.0/flash_attn-2.3.0+cu118torch2.0cxx11abiFALSE-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux'
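The change swaps the prebuilt flash-attn wheel from the cxx11abiTRUE build to the cxx11abiFALSE build, which matches PyTorch's official pip/conda wheels (built with the pre-CXX11 ABI). A minimal sketch to check which ABI the installed torch build reports, assuming torch is importable in the target environment:

# Minimal sketch: confirm the CXX11 ABI of the installed PyTorch build so the
# matching flash-attn wheel (cxx11abiTRUE vs cxx11abiFALSE) can be chosen.
import torch

print(torch.__version__)
# Official PyTorch 2.0 wheels are built with the old ABI, so this is expected
# to print False, matching the cxx11abiFALSE wheel pinned above.
print(torch.compiled_with_cxx11_abi())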