mirror of
https://github.com/KoboldAI/KoboldAI-Client.git
synced 2025-02-17 12:10:49 +01:00
The Mamba Installer Update
Big overhaul of the installer, partially based on the #53 commit from LexSong. The following is new: - Conda has been replaced with MicroMamba, which allows the dependencies to automatically download the best version of Python and prevents all the issues with people failing to download conda. - The installer now has more options, so you can choose not to delete the existing files, and it has new optional virtual K: drive support to bypass all the pathing issues people are having (sorry Windows 7 users, it's still not compatible even now). - Docker support for Linux systems has been added, including ROCm support. - Environment files are now used to more easily keep everything on track, and to allow conda users to manually create environments across all operating systems (ROCm is an outlier here because I have to use AMD's PyTorch docker for now; it was too much hassle getting their Conda to use the environment file, so it is not added to this commit). - Play.bat has been changed to allow the virtual drive support; everything should still be compatible with old installations, as I kept all the paths intact.
This commit is contained in:
parent
97aba93ef1
commit
1327bd30a5
8
docker-cuda/Dockerfile
Normal file
8
docker-cuda/Dockerfile
Normal file
@@ -0,0 +1,8 @@
|
||||
# This dockerfile is meant to serve as a CUDA base image. It starts from the micromamba base image, and
|
||||
# installs the KoboldAI dependencies from env.yml.
|
||||
|
||||
FROM mambaorg/micromamba
|
||||
WORKDIR /content/
|
||||
COPY env.yml /home/micromamba/env.yml
|
||||
RUN apt update && apt install xorg -y
|
||||
RUN micromamba install -y -n base -f /home/micromamba/env.yml
|
16
docker-cuda/docker-compose.yml
Normal file
16
docker-cuda/docker-compose.yml
Normal file
@@ -0,0 +1,16 @@
|
||||
version: "2"
|
||||
services:
|
||||
koboldai:
|
||||
build: .
|
||||
environment:
|
||||
- DISPLAY=${DISPLAY}
|
||||
network_mode: "host"
|
||||
volumes:
|
||||
- /tmp/.X11-unix:/tmp/.X11-unix
|
||||
- ../:/content/
|
||||
- $HOME/.Xauthority:/home/micromamba/.Xauthority:rw
|
||||
devices:
|
||||
- /dev/kfd
|
||||
- /dev/dri
|
||||
group_add:
|
||||
- video
|
6
docker-rocm/Dockerfile
Normal file
6
docker-rocm/Dockerfile
Normal file
@@ -0,0 +1,6 @@
|
||||
# This dockerfile is meant to serve as a rocm base image. It registers the debian rocm package repository, and
|
||||
# installs the rocm-dev package.
|
||||
|
||||
FROM rocm/pytorch
|
||||
RUN conda install --all -y Flask-SocketIO
|
||||
RUN pip3 install git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3
|
16
docker-rocm/docker-compose.yml
Normal file
16
docker-rocm/docker-compose.yml
Normal file
@@ -0,0 +1,16 @@
|
||||
version: "2"
|
||||
services:
|
||||
koboldai:
|
||||
build: .
|
||||
environment:
|
||||
- DISPLAY=${DISPLAY}
|
||||
network_mode: "host"
|
||||
volumes:
|
||||
- /tmp/.X11-unix:/tmp/.X11-unix
|
||||
- ../:/content/
|
||||
- $HOME/.Xauthority:/root/.Xauthority:rw
|
||||
devices:
|
||||
- /dev/kfd
|
||||
- /dev/dri
|
||||
group_add:
|
||||
- video
|
15
environments/finetuneanon.yml
Normal file
15
environments/finetuneanon.yml
Normal file
@@ -0,0 +1,15 @@
|
||||
name: koboldai
|
||||
channels:
|
||||
- pytorch
|
||||
- conda-forge
|
||||
- defaults
|
||||
dependencies:
|
||||
- colorama
|
||||
- flask-socketio
|
||||
- pytorch
|
||||
- tensorflow-gpu
|
||||
- python=3.8.*
|
||||
- pip
|
||||
- git
|
||||
- pip:
|
||||
- git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3-rp-b
|
12
environments/huggingface.yml
Normal file
12
environments/huggingface.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
name: koboldai
|
||||
channels:
|
||||
- huggingface
|
||||
- pytorch
|
||||
- conda-forge
|
||||
- defaults
|
||||
dependencies:
|
||||
- colorama
|
||||
- flask-socketio
|
||||
- pytorch
|
||||
- tensorflow-gpu
|
||||
- transformers
|
@@ -1,31 +1,62 @@
|
||||
@echo off
|
||||
title Installing Portable Python (Miniconda3)
|
||||
echo Miniconda3's installer will overwrite existing Miniconda3 shortcuts in the startmenu (We currently can not prevent this)
|
||||
title KoboldAI Runtime Installer (MicroMamba)
|
||||
echo Please choose one of the following transformers options
|
||||
echo 1. Finetuneanon Transformers
|
||||
echo 2. Official Transformers (Only use this if your model does not support half)
|
||||
echo 1. Finetuneanon Transformers (Best for GPU users)
|
||||
echo 2. Official Transformers (Best for CPU users)
|
||||
echo.
|
||||
echo Errors? Rerun this as admin so it can add the needed registery tweak.
|
||||
echo Errors? Rerun this as admin so it can add the needed LongPathsEnabled registery tweak.
|
||||
echo Installer failed or crashed? Run it again so it can continue.
|
||||
echo Only Windows 10 and higher officially supported, older Windows installations can't handle the paths.
|
||||
echo.
|
||||
|
||||
SET /P M=Type the number of the desired option and then press ENTER:
|
||||
SET /P B=Type the number of the desired option and then press ENTER:
|
||||
|
||||
Reg add "HKLM\SYSTEM\CurrentControlSet\Control\FileSystem" /v "LongPathsEnabled" /t REG_DWORD /d "1" /f 2>nul
|
||||
|
||||
cd %~dp0
|
||||
rmdir /s /q miniconda3
|
||||
where /q curl.exe
|
||||
IF ERRORLEVEL 1 (
|
||||
bitsadmin /transfer miniconda /download /priority normal https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe "%~dp0\miniconda3.exe"
|
||||
|
||||
if exist miniconda3\ (
|
||||
echo Delete existing installation?
|
||||
echo This is required if you are switching modes, or if you get dependency errors in the game.
|
||||
echo 1. Yes
|
||||
echo 2. No
|
||||
SET /P D=Type the number of the desired option and then press ENTER:
|
||||
) ELSE (
|
||||
curl -o miniconda3.exe https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe
|
||||
SET D=Workaround
|
||||
)
|
||||
miniconda3.exe /S /InstallationType=JustMe /RegisterPython=0 /AddTopath=0 /NoScripts=1 /NoRegistry=1 /D=%~dp0\miniconda3
|
||||
del miniconda3.exe
|
||||
call miniconda3\condabin\activate
|
||||
call conda install --all --no-shortcuts -y git pytorch tensorflow-gpu colorama Flask-SocketIO -c pytorch -c conda-forge
|
||||
IF %M%==1 pip install git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3
|
||||
IF %M%==2 call conda install --no-shortcuts -y transformers -c huggingface
|
||||
call conda clean -a -y
|
||||
echo All done!
|
||||
IF %D%==1 rmdir /s /q miniconda3
|
||||
|
||||
:Mode
|
||||
echo Which installation mode would you like?
|
||||
echo 1. Temporary Drive Letter (Mounts the folder as drive K:, more stable and portable)
|
||||
echo 2. Subfolder (Traditional method, can't run in folder paths that contain spaces)
|
||||
echo.
|
||||
SET /P M=Type the number of the desired option and then press ENTER:
|
||||
IF %M%==1 GOTO drivemap
|
||||
IF %M%==2 GOTO subfolder
|
||||
ECHO Incorrect choice
|
||||
GOTO MODE
|
||||
|
||||
|
||||
:drivemap
|
||||
echo 1 > loader.settings
|
||||
subst K: /D >nul
|
||||
mkdir miniconda3
|
||||
subst K: miniconda3
|
||||
copy umamba.exe K:\umamba.exe
|
||||
cd K:
|
||||
umamba.exe create -r K:\python\ -n base
|
||||
IF %B%==1 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\finetuneanon.yml" -y
|
||||
IF %B%==2 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y
|
||||
umamba.exe -r K:\ clean -a -y
|
||||
subst K: /d
|
||||
pause
|
||||
exit
|
||||
|
||||
:subfolder
|
||||
echo 2 > loader.settings
|
||||
umamba.exe create -r miniconda3\ -n base
|
||||
IF %B%==1 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\finetuneanon.yml -y
|
||||
IF %B%==2 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y
|
||||
umamba.exe clean -a -y
|
||||
pause
|
||||
exit
|
||||
|
4
play-cuda.sh
Normal file
4
play-cuda.sh
Normal file
@@ -0,0 +1,4 @@
|
||||
cd docker-cuda
|
||||
xhost +local:docker
|
||||
cp ../environments/finetuneanon.yml env.yml
|
||||
docker-compose run --service-ports koboldai bash -c "cd /content && python3 aiserver.py $*"
|
3
play-rocm.sh
Normal file
3
play-rocm.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
cd docker-rocm
|
||||
xhost +local:docker
|
||||
docker-compose run --service-ports koboldai bash -c "cd /content && python3 aiserver.py $*"
|
14
play.bat
14
play.bat
@@ -1,6 +1,18 @@
|
||||
@echo off
|
||||
cd %~dp0
|
||||
TITLE KoboldAI - Client
|
||||
SET /P M=<loader.settings
|
||||
IF %M%==1 GOTO drivemap
|
||||
IF %M%==2 GOTO subfolder
|
||||
|
||||
:subfolder
|
||||
call miniconda3\condabin\activate
|
||||
cls
|
||||
python aiserver.py
|
||||
cmd /k
|
||||
|
||||
:drivemap
|
||||
subst K: miniconda3 >nul
|
||||
call K:\python\condabin\activate
|
||||
python aiserver.py
|
||||
subst K: /D
|
||||
cmd /k
|
BIN
umamba.exe
Normal file
BIN
umamba.exe
Normal file
Binary file not shown.
Loading…
x
Reference in New Issue
Block a user