From a3dc188c8fb59f86577fc61becc3c0d47578e3e5 Mon Sep 17 00:00:00 2001
From: Henk
Date: Sun, 1 May 2022 15:58:37 +0200
Subject: [PATCH] Linux Installer Improvements

---
 install_requirements.sh | 15 +++++++++++++++
 play-rocm.sh            |  5 +++--
 play.sh                 |  5 +++--
 3 files changed, 21 insertions(+), 4 deletions(-)
 create mode 100755 install_requirements.sh

diff --git a/install_requirements.sh b/install_requirements.sh
new file mode 100755
index 00000000..99b698bc
--- /dev/null
+++ b/install_requirements.sh
@@ -0,0 +1,15 @@
+if [[ $1 = "cuda" ]]; then
+wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
+# Weird micromamba bug causes it to fail the first time, running it twice just to be safe, the second time is much faster
+bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
+exit
+fi
+if [[ $1 = "rocm" ]]; then
+wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
+# Weird micromamba bug causes it to fail the first time, running it twice just to be safe, the second time is much faster
+bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
+exit
+fi
+echo Please specify either CUDA or ROCM
diff --git a/play-rocm.sh b/play-rocm.sh
index 8e3666a0..351af73a 100755
--- a/play-rocm.sh
+++ b/play-rocm.sh
@@ -1,3 +1,4 @@
-wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
-bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
+if [ ! -f "runtime/envs/koboldai-rocm/bin/python" ]; then
+source ./install_requirements.sh rocm
+fi
 bin/micromamba run -r runtime -n koboldai-rocm python aiserver.py $*
diff --git a/play.sh b/play.sh
index 061cdc5e..5e1fe2e0 100755
--- a/play.sh
+++ b/play.sh
@@ -1,3 +1,4 @@
-wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
-bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
+if [ ! -f "runtime/envs/koboldai/bin/python" ]; then
+source ./install_requirements.sh cuda
+fi
 bin/micromamba run -r runtime -n koboldai python aiserver.py $*