commit a85d2a72c1
parent 699b5819a0
Author: Henk
Date:   2023-12-31 01:47:14 +01:00

2 changed files with 6 additions and 0 deletions


@@ -55,6 +55,8 @@ dependencies:
 - https://github.com/0cc4m/GPTQ-for-LLaMa/releases/download/0.0.6/gptq_koboldai-0.0.6-cp38-cp38-win_amd64.whl; sys_platform == 'win32'
 - https://huggingface.github.io/autogptq-index/whl/cu118/auto-gptq/auto_gptq-0.5.1%2Bcu118-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl; sys_platform == 'linux'
 - https://huggingface.github.io/autogptq-index/whl/cu118/auto-gptq/auto_gptq-0.5.1%2Bcu118-cp38-cp38-win_amd64.whl; sys_platform == 'win32'
+- https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux'
+- https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp38-cp38-win_amd64.whl; sys_platform == 'win32'
 - einops
 - peft==0.7.1
 - scipy
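
Both added entries pin the same AutoAWQ 0.1.8 cu118 build and rely on PEP 508 environment markers (the part after the semicolon) so that only the wheel matching the current OS is installed. As an illustrative sketch that is not part of this commit, the packaging library can evaluate such markers the same way pip does:

    # Illustrative sketch (not part of this commit): evaluate the PEP 508
    # markers that gate the platform-specific AutoAWQ wheels above.
    from packaging.markers import Marker

    linux_marker = Marker("sys_platform == 'linux'")
    win_marker = Marker("sys_platform == 'win32'")

    # Against the running interpreter exactly one marker is true, so pip
    # keeps exactly one of the two wheel URLs.
    print(linux_marker.evaluate(), win_marker.evaluate())

    # A marker can also be checked against an overridden environment,
    # e.g. the Windows target of the win_amd64 wheel.
    print(win_marker.evaluate({"sys_platform": "win32"}))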


@@ -53,3 +53,7 @@ https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn
 https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.8'
 xformers==0.0.23.post1
 omegaconf
+https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp310-cp310-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.10'
+https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp310-cp310-win_amd64.whl; sys_platform == 'win32' and python_version == '3.10'
+https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp38-cp38-linux_x86_64.whl; sys_platform == 'linux' and python_version == '3.8'
+https://github.com/casper-hansen/AutoAWQ/releases/download/v0.1.8/autoawq-0.1.8+cu118-cp38-cp38-win_amd64.whl; sys_platform == 'win32' and python_version == '3.8'
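
These four added lines select among the cp38/cp310 and Linux/Windows builds of the same AutoAWQ 0.1.8 cu118 release via sys_platform and python_version markers. As a minimal usage sketch that is not part of this commit, once whichever wheel matches is installed, a prequantized AWQ checkpoint can be loaded through AutoAWQ's from_quantized entry point; the checkpoint name below is only a hypothetical placeholder:

    # Minimal usage sketch (not part of this commit), assuming one of the
    # AutoAWQ wheels above is installed and a CUDA GPU is available.
    # The checkpoint name is a hypothetical placeholder, not from the diff.
    from awq import AutoAWQForCausalLM
    from transformers import AutoTokenizer

    quant_path = "some-org/some-model-AWQ"  # placeholder AWQ-quantized checkpoint

    model = AutoAWQForCausalLM.from_quantized(quant_path, fuse_layers=True)
    tokenizer = AutoTokenizer.from_pretrained(quant_path)

    tokens = tokenizer("Hello, my name is", return_tensors="pt").input_ids.cuda()
    output = model.generate(tokens, max_new_tokens=32)
    print(tokenizer.decode(output[0], skip_special_tokens=True))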