Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-02-09 00:08:53 +01:00)
LocalTunnel support
commit b8e79afe5e
parent 33733bf962
aiserver.py (25 changes)
@@ -778,6 +778,7 @@ def spRequest(filename):
 parser = argparse.ArgumentParser(description="KoboldAI Server")
 parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play")
 parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok")
+parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel")
 parser.add_argument("--host", action='store_true', help="Optimizes KoboldAI for Remote Play without using a proxy service")
 parser.add_argument("--model", help="Specify the Model Type to skip the Menu")
 parser.add_argument("--path", help="Specify the Path for local models (For model NeoCustom or GPT2Custom)")
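
Each tunnelling mode is an independent boolean switch on the same parser. A minimal standalone sketch of how the new flag parses alongside the existing ones (a reduced illustration, not the full KoboldAI parser):

import argparse

# Reduced sketch: only the three tunnelling switches from the diff above.
parser = argparse.ArgumentParser(description="KoboldAI Server")
parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play")
parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok")
parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel")

args = parser.parse_args(["--localtunnel"])
assert args.localtunnel and not args.ngrok  # each flag is set independently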
@@ -823,6 +824,9 @@ if args.remote:
 if args.ngrok:
     vars.host = True;
 
+if args.localtunnel:
+    vars.host = True;
+
 if args.host:
     vars.host = True;
 
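
All of these branches funnel into the same vars.host switch, which is what later routes startup into the tunnel-setup block. A condensed standalone equivalent (args and vars here are hypothetical SimpleNamespace stand-ins for KoboldAI's globals):

from types import SimpleNamespace

# Hypothetical stand-ins for the parsed arguments and the global vars object.
args = SimpleNamespace(remote=False, ngrok=False, localtunnel=True, host=False)
vars = SimpleNamespace(host=False)

# The separate if-blocks in the diff are equivalent to one or-chain.
if args.remote or args.ngrok or args.localtunnel or args.host:
    vars.host = True

print(vars.host)  # True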
@@ -5327,13 +5331,30 @@ if __name__ == "__main__":
 
     #socketio.run(app, host='0.0.0.0', port=5000)
     if(vars.host):
-        if(args.ngrok):
+        if(args.localtunnel):
+            import subprocess
+            localtunnel = subprocess.Popen(['lt', '-p', '5000', 'http'], shell=True, stdout=subprocess.PIPE)
+            attempts = 0
+            while attempts < 10:
+                try:
+                    cloudflare = str(localtunnel.stdout.readline())
+                    cloudflare = (re.search("(?P<url>https?:\/\/[^\s]+loca.lt)", cloudflare).group("url"))
+                    break
+                except:
+                    attempts += 1
+                    time.sleep(3)
+                    continue
+            if attempts == 10:
+                print("LocalTunnel could not be created, falling back to cloudflare...")
+                from flask_cloudflared import _run_cloudflared
+                cloudflare = _run_cloudflared(5000)
+        elif(args.ngrok):
             from flask_ngrok import _run_ngrok
             cloudflare = _run_ngrok()
         elif(args.remote):
             from flask_cloudflared import _run_cloudflared
             cloudflare = _run_cloudflared(5000)
-        if(args.ngrok or args.remote):
+        if(args.localtunnel or args.ngrok or args.remote):
             with open('cloudflare.log', 'w') as cloudflarelog:
                 cloudflarelog.write("KoboldAI has finished loading and is available at the following link : " + cloudflare)
             print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link : " + cloudflare + format(colors.END))
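
The new branch shells out to the LocalTunnel CLI and scrapes the public URL from its stdout, retrying up to ten times before falling back to cloudflared. A standalone sketch of the same pattern (the helper name and defaults are illustrative; it assumes the lt CLI from npm install -g localtunnel is on PATH):

import re
import subprocess
import time

def start_localtunnel(port=5000, retries=10, delay=3):
    # Passing the command as a list without shell=True avoids the POSIX
    # quirk where shell=True combined with a list executes only "lt" itself.
    proc = subprocess.Popen(["lt", "--port", str(port)],
                            stdout=subprocess.PIPE, text=True)
    for _ in range(retries):
        line = proc.stdout.readline()
        # Same idea as the diff's regex: pull the first *.loca.lt URL printed.
        match = re.search(r"(?P<url>https?://\S+loca\.lt)", line)
        if match:
            return match.group("url")
        time.sleep(delay)
    proc.terminate()
    return None  # caller can fall back to cloudflared, as the diff does

url = start_localtunnel()
if url is None:
    print("LocalTunnel could not be created, falling back to cloudflare...")
else:
    print("KoboldAI has finished loading and is available at the following link : " + url)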

colabkobold.sh
@@ -2,7 +2,7 @@
 # KoboldAI Easy Colab Deployment Script by Henk717
 
 # read the options
-TEMP=`getopt -o m:i:p:c:d:x:a:l:z:g:t:n:b:s: --long model:,init:,path:,configname:,download:,aria2:,dloc:xloc:7z:git:tar:ngrok:branch:savemodel: -- "$@"`
+TEMP=`getopt -o m:i:p:c:d:x:a:l:z:g:t:n:b:s:o: --long model:,init:,path:,configname:,download:,aria2:,dloc:xloc:7z:git:tar:ngrok:branch:savemodel:localtunnel: -- "$@"`
 eval set -- "$TEMP"
 
 # extract options and their arguments into variables.
@@ -17,7 +17,9 @@ while true ; do
         -c|--configname)
             configname=" --configname $2" ; shift 2 ;;
         -n|--ngrok)
-            configname=" --ngrok" ; shift 2 ;;
+            ngrok=" --ngrok" ; shift 2 ;;
+        -o|--localtunnel)
+            localtunnel=" --localtunnel" ; shift 2 ;;
         -d|--download)
             download="$2" ; shift 2 ;;
         -a|--aria2)
@@ -51,7 +53,7 @@ function launch
     else
         cd /content/KoboldAI-Client
         echo "Launching KoboldAI with the following options : python3 aiserver.py$model$kmpath$configname$ngrok$savemodel --colab"
-        python3 aiserver.py$model$kmpath$configname$ngrok$savemodel --colab
+        python3 aiserver.py$model$kmpath$configname$ngrok$localtunnel$savemodel --colab
         exit
     fi
 }
@@ -159,8 +161,9 @@ if [ "$init" != "skip" ]; then
     pip install -r requirements.txt
   fi
 
-  # Make sure Colab has netbase
+  # Make sure Colab has the system dependencies
   sudo apt install netbase -y
+  npm install -g localtunnel
 fi
 
 cd /content
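
On the Colab side the new -o/--localtunnel option simply accumulates " --localtunnel" into a variable that launch() splices into the aiserver.py command line, and the dependency step installs the lt CLI through npm. A hypothetical invocation (MODELNAME is a placeholder; because getopt declares localtunnel: with a trailing colon, the option consumes one dummy argument, mirroring the existing --ngrok declaration):

# Hypothetical usage; "true" is the dummy argument the colon makes getopt expect.
bash colabkobold.sh -m MODELNAME -o true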

commandline-rocm.sh (0 changes, Normal file → Executable file)
commandline.sh (0 changes, Normal file → Executable file)
play-rocm.sh (2 changes)

@@ -1,3 +1,3 @@
 wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
 bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
-bin/micromamba run -r runtime -n koboldai-rocm python aiserver.py
+bin/micromamba run -r runtime -n koboldai-rocm python aiserver.py $*

play.sh (2 changes)
@@ -1,3 +1,3 @@
 wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
 bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
-bin/micromamba run -r runtime -n koboldai python aiserver.py
+bin/micromamba run -r runtime -n koboldai python aiserver.py $*
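
Appending $* to the micromamba launch commands forwards whatever flags were given to the play scripts straight through to aiserver.py, so the tunnelling options also work outside Colab (note that $*, unlike "$@", splits arguments that contain spaces):

# Flags are now passed through to aiserver.py, e.g.:
./play.sh --localtunnel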