Merge branch 'KoboldAI:main' into main
This commit is contained in:
commit 265cdb802f
|
@ -0,0 +1,8 @@
|
|||
# Base image for the CUDA/micromamba KoboldAI docker setup: installs the
# conda environment described by env.yml (staged by the play script) plus
# the X11 client libraries needed to reach the host display.
# NOTE(review): the original header here was copy-pasted from the ROCm
# Dockerfile ("registers the debian rocm package repository ... installs
# the rocm-dev package") and did not describe this image.

# NOTE(review): untagged FROM resolves to :latest — consider pinning a
# tag or digest for reproducible builds.
FROM mambaorg/micromamba

WORKDIR /content/

# env.yml is a copy of one of environments/*.yml, placed next to this
# Dockerfile by the launch script before `docker-compose run`.
COPY env.yml /home/micromamba/env.yml

# xorg provides the X11 client bits; the compose file mounts the host's
# /tmp/.X11-unix socket and .Xauthority. Use apt-get (stable CLI for
# scripts), skip recommended packages, and drop the package lists in the
# same layer to keep the image small.
# NOTE(review): mambaorg/micromamba runs as a non-root user by default —
# confirm apt-get succeeds here (the original `apt` call had the same
# root requirement).
RUN apt-get update && apt-get install -y --no-install-recommends xorg \
    && rm -rf /var/lib/apt/lists/*

# Install the Python runtime dependencies into the base environment.
RUN micromamba install -y -n base -f /home/micromamba/env.yml
|
|
@ -0,0 +1,16 @@
|
|||
# Compose service for the CUDA (micromamba) KoboldAI image.
# NOTE(review): the scraped copy lost all YAML indentation, which makes
# nested mappings invalid; the nesting below is reconstructed — confirm
# against the repository copy.
version: "2"
services:
  koboldai:
    build: .
    environment:
      # Forward the host's X display into the container.
      - DISPLAY=${DISPLAY}
    # Host networking: the aiserver port is reachable directly on the host.
    network_mode: "host"
    volumes:
      # X11 socket and credentials so the container can open windows on
      # the host display.
      - /tmp/.X11-unix:/tmp/.X11-unix
      # Repository root mounted at /content (the image's WORKDIR).
      - ../:/content/
      - $HOME/.Xauthority:/home/micromamba/.Xauthority:rw
    devices:
      # GPU device nodes passed through to the container.
      - /dev/kfd
      - /dev/dri
    group_add:
      # Membership in "video" grants access to the GPU device nodes.
      - video
|
|
@ -0,0 +1,6 @@
|
|||
# ROCm build of KoboldAI. The rocm/pytorch base image already ships ROCm
# and PyTorch, so only the extra Python dependencies are installed here.
# NOTE(review): the original header claimed this file "registers the
# debian rocm package repository and installs the rocm-dev package",
# which does not match the instructions below.

# NOTE(review): untagged FROM resolves to :latest — consider pinning a
# tag or digest for reproducible builds.
FROM rocm/pytorch

# NOTE(review): "--all" is an update-style flag — confirm the conda
# version in this base image accepts it on `install`.
RUN conda install --all -y Flask-SocketIO

# Install the finetuneanon transformers fork; --no-cache-dir keeps the
# pip download cache out of the image layer.
RUN pip3 install --no-cache-dir git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3
|
|
@ -0,0 +1,16 @@
|
|||
# Compose service for the ROCm (rocm/pytorch) KoboldAI image.
# NOTE(review): the scraped copy lost all YAML indentation, which makes
# nested mappings invalid; the nesting below is reconstructed — confirm
# against the repository copy.
version: "2"
services:
  koboldai:
    build: .
    environment:
      # Forward the host's X display into the container.
      - DISPLAY=${DISPLAY}
    # Host networking: the aiserver port is reachable directly on the host.
    network_mode: "host"
    volumes:
      # X11 socket and credentials so the container can open windows on
      # the host display (this image runs as root, hence /root).
      - /tmp/.X11-unix:/tmp/.X11-unix
      # Repository root mounted at /content.
      - ../:/content/
      - $HOME/.Xauthority:/root/.Xauthority:rw
    devices:
      # ROCm GPU device nodes passed through to the container.
      - /dev/kfd
      - /dev/dri
    group_add:
      # Membership in "video" grants access to the GPU device nodes.
      - video
|
|
@ -0,0 +1,15 @@
|
|||
# Conda environment for KoboldAI using the finetuneanon transformers fork
# (GPU-oriented builds).
# NOTE(review): indentation reconstructed — the scraped copy was
# flattened, which makes the nested "pip:" list invalid YAML.
name: koboldai
channels:
  - pytorch
  - conda-forge
  - defaults
dependencies:
  - colorama
  - flask-socketio
  - pytorch
  - tensorflow-gpu
  # Pinned interpreter minor version (presumably required by the pinned
  # fork below — TODO confirm).
  - python=3.8.*
  - pip
  # git is needed so pip can install directly from the GitHub URL.
  - git
  - pip:
      # transformers fork with gpt-neo local attention support.
      - git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3-rp-b
|
|
@ -0,0 +1,12 @@
|
|||
# Conda environment for KoboldAI using the official Hugging Face
# transformers package (CPU-friendly alternative to finetuneanon.yml).
name: koboldai
# Channel priority: huggingface first so `transformers` resolves from it.
channels:
- huggingface
- pytorch
- conda-forge
- defaults
dependencies:
- colorama
- flask-socketio
- pytorch
- tensorflow-gpu
- transformers
|
|
@ -1,31 +1,62 @@
|
|||
@echo off
REM ============================================================
REM NOTE(review): this span is a scraped unified diff of
REM install_requirements.bat with the "-" (removed: old
REM Miniconda3-based installer) and "+" (added: new MicroMamba
REM installer) lines interleaved and their +/- markers lost.
REM It is NOT a runnable script as captured here; lines below are
REM annotated with the version they appear to belong to.
REM TODO: recover the clean post-change file from the repository
REM before editing further.
REM ============================================================
REM --- old title/warning ---
title Installing Portable Python (Miniconda3)
echo Miniconda3's installer will overwrite existing Miniconda3 shortcuts in the startmenu (We currently can not prevent this)
REM --- new title ---
title KoboldAI Runtime Installer (MicroMamba)
REM (shared) prompt for which transformers build to install
echo Please choose one of the following transformers options
REM --- old menu text ---
echo 1. Finetuneanon Transformers
echo 2. Official Transformers (Only use this if your model does not support half)
REM --- new menu text ---
echo 1. Finetuneanon Transformers (Best for GPU users)
echo 2. Official Transformers (Best for CPU users)
echo.
REM --- old then new warning text; "registery" is a typo for
REM "registry" (user-facing message only) ---
echo Errors? Rerun this as admin so it can add the needed registery tweak.
echo Errors? Rerun this as admin so it can add the needed LongPathsEnabled registery tweak.
echo Installer failed or crashed? Run it again so it can continue.
echo Only Windows 10 and higher officially supported, older Windows installations can't handle the paths.
echo.

REM old stored the transformers choice in M; new stores it in B
REM (M is reused later by the install-mode menu)
SET /P M=Type the number of the desired option and then press ENTER:
SET /P B=Type the number of the desired option and then press ENTER:

REM Enable NTFS long paths (requires admin; failure is suppressed)
Reg add "HKLM\SYSTEM\CurrentControlSet\Control\FileSystem" /v "LongPathsEnabled" /t REG_DWORD /d "1" /f 2>nul

cd %~dp0
REM --- old: wipe any previous install and download Miniconda3 ---
rmdir /s /q miniconda3
where /q curl.exe
IF ERRORLEVEL 1 (
bitsadmin /transfer miniconda /download /priority normal https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe "%~dp0\miniconda3.exe"

REM --- new: ask before deleting an existing runtime ---
if exist miniconda3\ (
echo Delete existing installation?
echo This is required if you are switching modes, or if you get dependency errors in the game.
echo 1. Yes
echo 2. No
SET /P D=Type the number of the desired option and then press ENTER:
) ELSE (
REM old ELSE body: curl fallback download; new ELSE body: skip the
REM delete prompt when no previous install exists
curl -o miniconda3.exe https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe
SET D=Workaround
)
REM --- old: silent Miniconda3 install and conda provisioning ---
miniconda3.exe /S /InstallationType=JustMe /RegisterPython=0 /AddTopath=0 /NoScripts=1 /NoRegistry=1 /D=%~dp0\miniconda3
del miniconda3.exe
call miniconda3\condabin\activate
call conda install --all --no-shortcuts -y git pytorch tensorflow-gpu colorama Flask-SocketIO -c pytorch -c conda-forge
IF %M%==1 pip install git+https://github.com/finetuneanon/transformers@gpt-neo-localattention3
IF %M%==2 call conda install --no-shortcuts -y transformers -c huggingface
call conda clean -a -y
echo All done!
REM --- new: honor the delete choice collected above ---
IF %D%==1 rmdir /s /q miniconda3

REM --- new: choose between drive-letter and subfolder installs ---
:Mode
echo Which installation mode would you like?
echo 1. Temporary Drive Letter (Mounts the folder as drive K:, more stable and portable)
echo 2. Subfolder (Traditional method, can't run in folder paths that contain spaces)
echo.
SET /P M=Type the number of the desired option and then press ENTER:
IF %M%==1 GOTO drivemap
IF %M%==2 GOTO subfolder
ECHO Incorrect choice
REM batch labels are case-insensitive, so GOTO MODE reaches :Mode
GOTO MODE


REM --- new: install the runtime behind a substituted K: drive ---
:drivemap
REM record mode 1 so play.bat knows to activate via K:
echo 1 > loader.settings
subst K: /D >nul
mkdir miniconda3
subst K: miniconda3
copy umamba.exe K:\umamba.exe
cd K:
umamba.exe create -r K:\python\ -n base
IF %B%==1 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\finetuneanon.yml" -y
IF %B%==2 umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y
umamba.exe -r K:\ clean -a -y
subst K: /d
pause
exit

REM --- new: install the runtime into the miniconda3 subfolder ---
:subfolder
REM record mode 2 so play.bat activates the subfolder install
echo 2 > loader.settings
umamba.exe create -r miniconda3\ -n base
IF %B%==1 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\finetuneanon.yml -y
IF %B%==2 umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y
umamba.exe clean -a -y
pause
exit
|
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/bash
# Launch KoboldAI inside the CUDA docker image, forwarding the host X
# display and any extra CLI arguments to aiserver.py.
# Guard the cd so docker-compose never runs from the wrong directory.
cd docker-cuda || exit 1
# Allow local docker containers to connect to the host X server.
xhost +local:docker
# The Dockerfile COPYs env.yml; stage the GPU environment file for it.
cp ../environments/finetuneanon.yml env.yml
# Bug fix: the original interpolated $* into the -c string, which
# re-splits any argument containing spaces. Passing "$@" through bash's
# positional parameters (first extra arg becomes $0) preserves each
# argument exactly.
docker-compose run --service-ports koboldai bash -c 'cd /content && python3 aiserver.py "$@"' bash "$@"
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/bash
# Launch KoboldAI inside the ROCm docker image, forwarding the host X
# display and any extra CLI arguments to aiserver.py.
# Guard the cd so docker-compose never runs from the wrong directory.
cd docker-rocm || exit 1
# Allow local docker containers to connect to the host X server.
xhost +local:docker
# Bug fix: the original interpolated $* into the -c string, which
# re-splits any argument containing spaces. Passing "$@" through bash's
# positional parameters (first extra arg becomes $0) preserves each
# argument exactly.
docker-compose run --service-ports koboldai bash -c 'cd /content && python3 aiserver.py "$@"' bash "$@"
|
14
play.bat
14
play.bat
|
@ -1,6 +1,18 @@
|
|||
@echo off
REM KoboldAI launcher: reads the install mode chosen by
REM install_requirements.bat from loader.settings and activates the
REM matching runtime before starting the server.
cd %~dp0
TITLE KoboldAI - Client
REM loader.settings holds "1" (drive-letter install) or "2" (subfolder);
REM anything else falls through into :subfolder below.
SET /P M=<loader.settings
IF %M%==1 GOTO drivemap
IF %M%==2 GOTO subfolder

REM --- runtime installed in .\miniconda3: activate it in place ---
:subfolder
call miniconda3\condabin\activate
cls
python aiserver.py
REM keep the console open after the server exits
cmd /k

REM --- runtime installed behind the substituted K: drive ---
:drivemap
subst K: miniconda3 >nul
call K:\python\condabin\activate
python aiserver.py
REM release the K: mapping once the server exits
subst K: /D
cmd /k
|
Binary file not shown.
Loading…
Reference in New Issue