From f08a75255b7af9f97fca5b4d7453ec76e9048e67 Mon Sep 17 00:00:00 2001
From: Henk
Date: Fri, 20 Oct 2023 16:36:51 +0200
Subject: [PATCH] Fix flash_attn on colab

---
 requirements.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 197722fe..7eb3b66c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -48,6 +48,7 @@ https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.4.2/auto_gptq-0.4.2+cu
 https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.4.2/auto_gptq-0.4.2+cu118-cp38-cp38-win_amd64.whl; sys_platform == 'win32' and python_version == '3.8'
 windows-curses; sys_platform == 'win32'
 pynvml
-flash_attn==2.3.0
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.0/flash_attn-2.3.0+cu118torch2.0cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.10'
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.0/flash_attn-2.3.0+cu118torch2.0cxx11abiFALSE-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.8'
 xformers==0.0.21
 exllamav2==0.0.4
\ No newline at end of file
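
Note: the replacement lines pin prebuilt flash-attn 2.3.0 wheels and rely on pip environment markers (PEP 508) so each wheel is only installed on a matching platform and Python version, avoiding a source build of flash_attn on Colab. A minimal sketch of how such a marker evaluates, assuming the `packaging` library is installed; the marker string mirrors the one used in requirements.txt and is shown here purely for illustration:

    from packaging.markers import Marker

    # Marker copied from the requirements.txt line for the cp310 wheel;
    # evaluate() checks it against the current interpreter's environment.
    marker = Marker("sys_platform == 'linux' and python_version == '3.10'")
    print(marker.evaluate())  # True on Linux with Python 3.10 (e.g. current Colab runtimes)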