#!/bin/bash
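# Bootstraps a self-contained micromamba runtime for KoboldAI.
# Usage: pass "cuda" (NVIDIA) or "rocm" (AMD) as the first argument to pick the environment.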
if [[ $1 = "cuda" || $1 = "CUDA" ]]; then
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
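# Create the koboldai environment from environments/huggingface.yml under the runtime root prefix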
bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
# A micromamba bug can cause the first environment creation to fail, so it is run twice to be safe; the second run is much faster.
bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
exit
fi
if [[ $1 = "rocm" || $1 = "ROCM" ]]; then
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
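# Create the koboldai-rocm environment from environments/rocm.yml under the runtime root prefix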
bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
# A micromamba bug can cause the first environment creation to fail, so it is run twice to be safe; the second run is much faster.
bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
exit
fi
echo "Please specify either CUDA or ROCM"