Mirror of https://github.com/KoboldAI/KoboldAI-Client.git
Fix for Tortoise TTS install script
@@ -21,7 +21,8 @@ ECHO Runtime launching in subfolder mode
call miniconda3\condabin\activate
pip install git+https://github.com/neonbjb/tortoise-tts progressbar inflect librosa rotary-embedding-torch unidecode lazy_loader llvmlite numba joblib decorator audioread msgpack pooch scikit-learn soundfile soxr platformdirs threadpoolctl pydantic-core annotated-types pydantic --no-dependencies
pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
pip install -r requirements.txt --no-dependencies
REM pip install -r requirements.txt --no-dependencies
umamba.exe install --no-shortcuts -r miniconda3 -n base -f environments\huggingface.yml -y --always-copy
cmd /k
pause
exit
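Every pip call in this hunk passes --no-dependencies, so pip installs exactly the packages listed and skips dependency resolution; that is why the tortoise-tts line spells out its transitive requirements by hand. A quick post-install sanity check, run inside the activated environment (a sketch, not part of the commit; tortoise-tts installs the tortoise module):

pip check
python -c "import tortoise.api, torchaudio; print(torchaudio.__version__)"

pip check flags any declared-but-unsatisfied dependencies, and the import line fails immediately if the Tortoise or torchaudio install is broken.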
@@ -33,7 +34,8 @@ subst K: miniconda3 >nul
call K:\python\condabin\activate
pip install git+https://github.com/neonbjb/tortoise-tts progressbar inflect librosa rotary-embedding-torch unidecode lazy_loader llvmlite numba joblib decorator audioread msgpack pooch scikit-learn soundfile soxr platformdirs threadpoolctl pydantic-core annotated-types pydantic --no-dependencies
pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
pip install -r requirements.txt --no-dependencies
REM pip install -r requirements.txt --no-dependencies
umamba.exe install --no-shortcuts -r K:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
cmd /k
pause
exit
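This variant runs after subst K: miniconda3 has mapped the runtime onto a virtual K: drive, so the working directory no longer points into the checkout; %~dp0 expands to the drive and directory of the running .bat file, which keeps environments\huggingface.yml resolvable anyway. For comparison, the script-relative equivalent in bash would be (a sketch, not part of the commit):

# Resolve paths relative to the script itself rather than the caller's CWD,
# the bash counterpart of batch's %~dp0.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
echo "$SCRIPT_DIR/environments/huggingface.yml"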
@@ -45,7 +47,8 @@ subst B: miniconda3 >nul
call B:\python\condabin\activate
pip install git+https://github.com/neonbjb/tortoise-tts progressbar inflect librosa rotary-embedding-torch unidecode lazy_loader llvmlite numba joblib decorator audioread msgpack pooch scikit-learn soundfile soxr platformdirs threadpoolctl pydantic-core annotated-types pydantic --no-dependencies
pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
pip install -r requirements.txt --no-dependencies
REM pip install -r requirements.txt --no-dependencies
umamba.exe install --no-shortcuts -r B:\python\ -n base -f "%~dp0\environments\huggingface.yml" -y --always-copy
cmd /k
pause
exit
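All three batch variants pin torchaudio to the CUDA 11.8 wheel index, presumably to match the cu118 torch build provided by environments\huggingface.yml; mismatched torch/torchaudio builds fail at import time. A one-liner to confirm the pair is consistent (a sketch for manual verification):

python -c "import torch, torchaudio; print(torch.__version__, torchaudio.__version__, torch.version.cuda)"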
@@ -1,4 +1,28 @@
#!/bin/bash
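# Install tortoise-tts (plus OmegaConf and deepspeed) and a CUDA 11.8 build of
# torchaudio into the existing "runtime" env, then re-apply the project's
# requirements.txt without letting pip resolve dependencies.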
bin/micromamba run -r runtime -n koboldai pip install git+https://github.com/neonbjb/tortoise-tts OmegaConf deepspeed
bin/micromamba run -r runtime -n koboldai pip install torchaudio --index-url https://download.pytorch.org/whl/cu118
bin/micromamba run -r runtime -n koboldai pip install -r requirements.txt --no-dependencies
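
# PYTHONNOUSERSITE=1 keeps Python from importing packages from the user's
# ~/.local site-packages, which could otherwise shadow the runtime env.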
export PYTHONNOUSERSITE=1
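# Make sure all git submodules are checked out before the env is built.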
git submodule update --init --recursive
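# Each branch below fetches a static micromamba and builds the runtime
# matching the first argument: cuda, rocm, or ipex.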
if [[ $1 = "cuda" || $1 = "CUDA" ]]; then
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
# A micromamba quirk can make the first create fail, so it is run twice to be safe; the second run is much faster
bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
exit
fi
if [[ $1 = "rocm" || $1 = "ROCM" ]]; then
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
# A micromamba quirk can make the first create fail, so it is run twice to be safe; the second run is much faster
bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
exit
fi
if [[ $1 = "ipex" || $1 = "IPEX" ]]; then
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
bin/micromamba create -f environments/ipex.yml -r runtime -n koboldai-ipex -y
# A micromamba quirk can make the first create fail, so it is run twice to be safe; the second run is much faster
bin/micromamba create -f environments/ipex.yml -r runtime -n koboldai-ipex -y
exit
fi
echo Please specify one of CUDA, ROCM or IPEX
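Once a branch has finished, the new runtime can be smoke-tested with the same micromamba invocation the script itself uses for pip (a sketch; substitute the rocm/ipex env name as appropriate):

bin/micromamba run -r runtime -n koboldai python -c "import torch; print(torch.__version__)"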