diff --git a/.gitignore b/.gitignore
index d470fb4b..52451146 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,8 @@ stories
 /.project
 *.bak
 miniconda3
+runtime
+bin
 *.settings
 __pycache__
 *.log
diff --git a/commandline-rocm.sh b/commandline-rocm.sh
new file mode 100644
index 00000000..5c9a54aa
--- /dev/null
+++ b/commandline-rocm.sh
@@ -0,0 +1 @@
+bin/micromamba run -r runtime -n koboldai-rocm bash
diff --git a/commandline.sh b/commandline.sh
new file mode 100644
index 00000000..72338169
--- /dev/null
+++ b/commandline.sh
@@ -0,0 +1 @@
+bin/micromamba run -r runtime -n koboldai bash
diff --git a/play-cuda.sh b/docker-cuda.sh
similarity index 100%
rename from play-cuda.sh
rename to docker-cuda.sh
diff --git a/docker-rocm.sh b/docker-rocm.sh
new file mode 100644
index 00000000..d32c404c
--- /dev/null
+++ b/docker-rocm.sh
@@ -0,0 +1,4 @@
+cd docker-rocm
+xhost +local:docker
+cp ../environments/rocm.yml env.yml
+docker-compose run --service-ports koboldai bash -c "cd /content && python3 aiserver.py $*"
diff --git a/environments/rocm.yml b/environments/rocm.yml
index c1c19932..eee040af 100644
--- a/environments/rocm.yml
+++ b/environments/rocm.yml
@@ -15,9 +15,9 @@ dependencies:
   - protobuf
   - pip:
     - --find-links https://download.pytorch.org/whl/rocm4.2/torch_stable.html
-    - torch==1.11.*
-    - torchvision==0.11.1
+    - torch==1.10.*
+    - torchvision
     - flask-cloudflared
     - flask-ngrok
     - lupa==1.10
-    - transformers>=4.17
\ No newline at end of file
+    - transformers>=4.17
diff --git a/play-rocm.sh b/play-rocm.sh
index d32c404c..abff2106 100644
--- a/play-rocm.sh
+++ b/play-rocm.sh
@@ -1,4 +1,3 @@
-cd docker-rocm
-xhost +local:docker
-cp ../environments/rocm.yml env.yml
-docker-compose run --service-ports koboldai bash -c "cd /content && python3 aiserver.py $*"
+wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
+bin/micromamba run -r runtime -n koboldai-rocm python aiserver.py
diff --git a/play.sh b/play.sh
new file mode 100644
index 00000000..25f433d1
--- /dev/null
+++ b/play.sh
@@ -0,0 +1,3 @@
+wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
+bin/micromamba run -r runtime -n koboldai python aiserver.py