diff --git a/docker-cuda/Dockerfile b/docker-cuda/Dockerfile new file mode 100644 index 00000000..2ea3377b --- /dev/null +++ b/docker-cuda/Dockerfile @@ -0,0 +1,8 @@ +# This dockerfile builds the CUDA base image for KoboldAI on top of mambaorg/micromamba. It installs xorg and +# creates the base micromamba environment from env.yml (copied in by play-cuda.sh). + +FROM mambaorg/micromamba +WORKDIR /content/ +COPY env.yml /home/micromamba/env.yml +RUN apt update && apt install xorg -y +RUN micromamba install -y -n base -f /home/micromamba/env.yml diff --git a/docker-cuda/docker-compose.yml b/docker-cuda/docker-compose.yml new file mode 100644 index 00000000..960f2684 --- /dev/null +++ b/docker-cuda/docker-compose.yml @@ -0,0 +1,16 @@ +version: "2" +services: + koboldai: + build: . + environment: + - DISPLAY=${DISPLAY} + network_mode: "host" + volumes: + - /tmp/.X11-unix:/tmp/.X11-unix + - ../:/content/ + - $HOME/.Xauthority:/home/micromamba/.Xauthority:rw + devices: + - /dev/kfd + - /dev/dri + group_add: + - video diff --git a/docker-rocm/Dockerfile b/docker-rocm/Dockerfile new file mode 100644 index 00000000..0ea31a3b --- /dev/null +++ b/docker-rocm/Dockerfile @@ -0,0 +1,6 @@ +# This dockerfile builds the ROCm base image for KoboldAI on top of rocm/pytorch. It adds +# Flask-SocketIO and finetuneanon's transformers fork to the preinstalled conda environment. + +FROM rocm/pytorch +RUN conda install --all -y Flask-SocketIO +RUN pip3 install git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3 diff --git a/docker-rocm/docker-compose.yml b/docker-rocm/docker-compose.yml new file mode 100644 index 00000000..3fc217a5 --- /dev/null +++ b/docker-rocm/docker-compose.yml @@ -0,0 +1,16 @@ +version: "2" +services: + koboldai: + build: . 
+ environment: + - DISPLAY=${DISPLAY} + network_mode: "host" + volumes: + - /tmp/.X11-unix:/tmp/.X11-unix + - ../:/content/ + - $HOME/.Xauthority:/root/.Xauthority:rw + devices: + - /dev/kfd + - /dev/dri + group_add: + - video diff --git a/environments/finetuneanon.yml b/environments/finetuneanon.yml new file mode 100644 index 00000000..2ac0cc55 --- /dev/null +++ b/environments/finetuneanon.yml @@ -0,0 +1,15 @@ +name: koboldai +channels: + - pytorch + - conda-forge + - defaults +dependencies: + - colorama + - flask-socketio + - pytorch + - tensorflow-gpu + - python=3.8.* + - pip + - git + - pip: + - git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3-rp-b \ No newline at end of file diff --git a/environments/huggingface.yml b/environments/huggingface.yml new file mode 100644 index 00000000..bfbd33e5 --- /dev/null +++ b/environments/huggingface.yml @@ -0,0 +1,12 @@ +name: koboldai +channels: + - huggingface + - pytorch + - conda-forge + - defaults +dependencies: + - colorama + - flask-socketio + - pytorch + - tensorflow-gpu + - transformers \ No newline at end of file diff --git a/install_requirements.bat b/install_requirements.bat index f7f5f1ab..f368446b 100644 --- a/install_requirements.bat +++ b/install_requirements.bat @@ -1,31 +1,62 @@ @echo off -title Installing Portable Python (Miniconda3) -echo Miniconda3's installer will overwrite existing Miniconda3 shortcuts in the startmenu (We currently can not prevent this) +title KoboldAI Runtime Installer (MicroMamba) echo Please choose one of the following transformers options -echo 1. Finetuneanon Transformers -echo 2. Official Transformers (Only use this if your model does not support half) +echo 1. Finetuneanon Transformers (Best for GPU users) +echo 2. Official Transformers (Best for CPU users) echo. -echo Errors? Rerun this as admin so it can add the needed registery tweak. +echo Errors? Rerun this as admin so it can add the needed LongPathsEnabled registery tweak. 
+echo Installer failed or crashed? Run it again so it can continue. +echo Only Windows 10 and higher officially supported, older Windows installations can't handle the paths. echo. -SET /P M=Type the number of the desired option and then press ENTER: +SET /P B=Type the number of the desired option and then press ENTER: Reg add "HKLM\SYSTEM\CurrentControlSet\Control\FileSystem" /v "LongPathsEnabled" /t REG_DWORD /d "1" /f 2>nul - cd %~dp0 -rmdir /s /q miniconda3 -where /q curl.exe -IF ERRORLEVEL 1 ( - bitsadmin /transfer miniconda /download /priority normal https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe "%~dp0\miniconda3.exe" + +if exist miniconda3\ ( + echo Delete existing installation? + echo This is required if you are switching modes, or if you get dependency errors in the game. + echo 1. Yes + echo 2. No + SET /P D=Type the number of the desired option and then press ENTER: ) ELSE ( - curl -o miniconda3.exe https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe + SET D=Workaround ) -miniconda3.exe /S /InstallationType=JustMe /RegisterPython=0 /AddTopath=0 /NoScripts=1 /NoRegistry=1 /D=%~dp0\miniconda3 -del miniconda3.exe -call miniconda3\condabin\activate -call conda install --all --no-shortcuts -y git pytorch tensorflow-gpu colorama Flask-SocketIO -c pytorch -c conda-forge -IF %M%==1 pip install git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3 -IF %M%==2 call conda install --no-shortcuts -y transformers -c huggingface -call conda clean -a -y -echo All done! +IF %D%==1 rmdir /s /q miniconda3 + +:Mode +echo Which installation mode would you like? +echo 1. Temporary Drive Letter (Mounts the folder as drive K:, more stable and portable) +echo 2. Subfolder (Traditional method, can't run in folder paths that contain spaces) +echo. 
+SET /P M=Type the number of the desired option and then press ENTER: +IF %M%==1 GOTO drivemap +IF %M%==2 GOTO subfolder +ECHO Incorrect choice +GOTO MODE + + +:drivemap +echo 1 > loader.settings +subst K: /D >nul +mkdir miniconda3 +subst K: miniconda3 +copy umamba.exe K:\umamba.exe +cd K: +umamba.exe create -r K:\python\ -n base +IF %B%==1 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\finetuneanon.yml" -y +IF %B%==2 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y +umamba.exe -r K:\ clean -a -y +subst K: /d pause +exit + +:subfolder +echo 2 > loader.settings +umamba.exe create -r miniconda3\ -n base +IF %B%==1 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\finetuneanon.yml -y +IF %B%==2 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y +umamba.exe clean -a -y +pause +exit diff --git a/play-cuda.sh b/play-cuda.sh new file mode 100644 index 00000000..94069beb --- /dev/null +++ b/play-cuda.sh @@ -0,0 +1,4 @@ +cd docker-cuda +xhost +local:docker +cp ../environments/finetuneanon.yml env.yml +docker-compose run --service-ports koboldai bash -c "cd /content && python3 aiserver.py $*" diff --git a/play-rocm.sh b/play-rocm.sh new file mode 100644 index 00000000..c099369e --- /dev/null +++ b/play-rocm.sh @@ -0,0 +1,3 @@ +cd docker-rocm +xhost +local:docker +docker-compose run --service-ports koboldai bash -c "cd /content && python3 aiserver.py $*" diff --git a/play.bat b/play.bat index 7a11b5d4..2d723fd9 100644 --- a/play.bat +++ b/play.bat @@ -1,6 +1,18 @@ @echo off +cd %~dp0 TITLE KoboldAI - Client +SET /P M=nul +call K:\python\condabin\activate +python aiserver.py +subst K: /D +cmd /k \ No newline at end of file diff --git a/umamba.exe b/umamba.exe new file mode 100644 index 00000000..9b06bf78 Binary files /dev/null and b/umamba.exe differ