Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2024-12-12 08:36:28 +01:00)
#!/bin/bash

if [[ $1 = "cuda" ]]; then
    # Download the latest micromamba binary and extract it to bin/micromamba
    wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
    # Build the huggingface environment into the local runtime/ prefix
    bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
    # A micromamba bug can cause the first create to fail, so it is run twice to be safe; the second run is much faster
    bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
    exit
fi

if [[ $1 = "rocm" ]]; then
    # Download the latest micromamba binary and extract it to bin/micromamba
    wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
    # Build the ROCm environment into the local runtime/ prefix
    bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
    # Same micromamba bug as above: run the create twice to be safe
    bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
    exit
fi

echo "Please specify either CUDA or ROCM"
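Usage sketch, assuming the script is saved as install_requirements.sh in the repository root and marked executable (the filename and the aiserver.py entry point reflect the usual KoboldAI-Client layout and are not stated in this file itself):

    # NVIDIA GPUs: creates the huggingface environment under runtime/
    ./install_requirements.sh cuda

    # AMD GPUs: creates the ROCm environment under runtime/
    ./install_requirements.sh rocm

    # Afterwards the environment can be used through micromamba's run subcommand, e.g.:
    bin/micromamba run -r runtime -n koboldai python aiserver.py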