Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
Removed tortoise tts install scripts
@@ -1,54 +0,0 @@
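(Deleted file 1 of 2: the Windows batch install script; the filename is not shown in this view.)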
@echo off
cd /D %~dp0

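REM The :Isolation block below detaches the script from any user-level conda or
REM Python install: stock Windows PATH only, conda shell state cleared, user
REM site-packages ignored.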
:Isolation
call conda deactivate 2>NUL
set Path=%windir%\system32;%windir%;C:\Windows\System32\Wbem;%windir%\System32\WindowsPowerShell\v1.0\;%windir%\System32\OpenSSH\
SET CONDA_SHLVL=
SET PYTHONNOUSERSITE=1
SET PYTHONPATH=

rmdir /S /Q flask_session 2>NUL

TITLE KoboldAI - Server
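REM loader.settings holds the install layout chosen at setup time:
REM 1 = runtime mapped to drive K:, 2 = runtime in the miniconda3 subfolder,
REM 3 = runtime mapped to drive B: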
SET /P M=<loader.settings
IF %M%==1 GOTO drivemap
IF %M%==2 GOTO subfolder
IF %M%==3 GOTO drivemap_B

:subfolder
ECHO Runtime launching in subfolder mode
call miniconda3\condabin\activate
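REM tortoise-tts is installed with --no-dependencies so pip cannot overwrite the
REM pinned KoboldAI packages; every transitive dependency is therefore listed
REM explicitly. torchaudio comes from the PyTorch CUDA 11.8 wheel index,
REM presumably to match the CUDA build of the torch already in the environment.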
pip install git+https://github.com/neonbjb/tortoise-tts progressbar inflect librosa rotary-embedding-torch unidecode lazy_loader llvmlite numba joblib decorator audioread msgpack pooch scikit-learn soundfile soxr platformdirs threadpoolctl pydantic-core annotated-types pydantic --no-dependencies
pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
REM pip install -r requirements.txt --no-dependencies
umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y --always-copy
cmd /k
pause
exit

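REM The drive-map modes mount the miniconda3 folder as a virtual drive via subst,
REM presumably to keep paths short and avoid issues with spaces in folder names.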
:drivemap
ECHO Runtime launching in K: drive mode
subst /D K: >nul
subst K: miniconda3 >nul
call K:\python\condabin\activate
pip install git+https://github.com/neonbjb/tortoise-tts progressbar inflect librosa rotary-embedding-torch unidecode lazy_loader llvmlite numba joblib decorator audioread msgpack pooch scikit-learn soundfile soxr platformdirs threadpoolctl pydantic-core annotated-types pydantic --no-dependencies
pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
REM pip install -r requirements.txt --no-dependencies
umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
cmd /k
pause
exit

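REM Same as :drivemap but maps B: instead, presumably as a fallback when K: is taken.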
:drivemap_B
ECHO Runtime launching in B: drive mode
subst /D B: >nul
subst B: miniconda3 >nul
call B:\python\condabin\activate
pip install git+https://github.com/neonbjb/tortoise-tts progressbar inflect librosa rotary-embedding-torch unidecode lazy_loader llvmlite numba joblib decorator audioread msgpack pooch scikit-learn soundfile soxr platformdirs threadpoolctl pydantic-core annotated-types pydantic --no-dependencies
pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
REM pip install -r requirements.txt --no-dependencies
umamba.exe install --no-shortcuts -r B:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
cmd /k
pause
exit
@@ -1,28 +0,0 @@
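(Deleted file 2 of 2: the matching Linux shell install script; the filename is not shown in this view.)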
#!/bin/bash
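# Install tortoise-tts plus OmegaConf and deepspeed into the koboldai runtime.
# These two lines assume the runtime already exists; the backend-specific
# blocks below are what create it.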
bin/micromamba run -r runtime -n koboldai pip install git+https://github.com/neonbjb/tortoise-tts OmegaConf deepspeed
bin/micromamba run -r runtime -n koboldai pip install torchaudio --index-url https://download.pytorch.org/whl/cu118

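# Ignore user site-packages and make sure git submodules are checked out.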
export PYTHONNOUSERSITE=1
git submodule update --init --recursive
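# For the requested backend, fetch a static micromamba binary and create the
# runtime environment from the matching YAML spec.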
if [[ $1 = "cuda" || $1 = "CUDA" ]]; then
|
|
||||||
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
|
|
||||||
bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
|
|
||||||
# Weird micromamba bug causes it to fail the first time, running it twice just to be safe, the second time is much faster
|
|
||||||
bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
if [[ $1 = "rocm" || $1 = "ROCM" ]]; then
|
|
||||||
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
|
|
||||||
bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
|
|
||||||
# Weird micromamba bug causes it to fail the first time, running it twice just to be safe, the second time is much faster
|
|
||||||
bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
if [[ $1 = "ipex" || $1 = "IPEX" ]]; then
|
|
||||||
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
|
|
||||||
bin/micromamba create -f environments/ipex.yml -r runtime -n koboldai-ipex -y
|
|
||||||
# Weird micromamba bug causes it to fail the first time, running it twice just to be safe, the second time is much faster
|
|
||||||
bin/micromamba create -f environments/ipex.yml -r runtime -n koboldai-ipex -y
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
echo Please specify CUDA, ROCM or IPEX