From b8e79afe5e8898eebd6b1c0db9061ad5b7f572ea Mon Sep 17 00:00:00 2001 From: Henk Date: Tue, 19 Apr 2022 13:47:44 +0200 Subject: [PATCH] LocalTunnel support --- aiserver.py | 25 +++++++++++++++++++++++-- colabkobold.sh | 11 +++++++---- commandline-rocm.sh | 0 commandline.sh | 0 play-rocm.sh | 2 +- play.sh | 2 +- 6 files changed, 32 insertions(+), 8 deletions(-) mode change 100644 => 100755 commandline-rocm.sh mode change 100644 => 100755 commandline.sh diff --git a/aiserver.py b/aiserver.py index ebdf7fa8..1a1d1f4c 100644 --- a/aiserver.py +++ b/aiserver.py @@ -778,6 +778,7 @@ def spRequest(filename): parser = argparse.ArgumentParser(description="KoboldAI Server") parser.add_argument("--remote", action='store_true', help="Optimizes KoboldAI for Remote Play") parser.add_argument("--ngrok", action='store_true', help="Optimizes KoboldAI for Remote Play using Ngrok") +parser.add_argument("--localtunnel", action='store_true', help="Optimizes KoboldAI for Remote Play using Localtunnel") parser.add_argument("--host", action='store_true', help="Optimizes KoboldAI for Remote Play without using a proxy service") parser.add_argument("--model", help="Specify the Model Type to skip the Menu") parser.add_argument("--path", help="Specify the Path for local models (For model NeoCustom or GPT2Custom)") @@ -823,6 +824,9 @@ if args.remote: if args.ngrok: vars.host = True; +if args.localtunnel: + vars.host = True; + if args.host: vars.host = True; @@ -5327,13 +5331,30 @@ if __name__ == "__main__": #socketio.run(app, host='0.0.0.0', port=5000) if(vars.host): - if(args.ngrok): + if(args.localtunnel): + import subprocess + localtunnel = subprocess.Popen(['lt', '-p', '5000', 'http'], shell=True, stdout=subprocess.PIPE) + attempts = 0 + while attempts < 10: + try: + cloudflare = str(localtunnel.stdout.readline()) + cloudflare = (re.search("(?P<url>https?:\/\/[^\s]+loca.lt)", cloudflare).group("url")) + break + except: + attempts += 1 + time.sleep(3) + continue + if attempts == 10: + 
print("LocalTunnel could not be created, falling back to cloudflare...") + from flask_cloudflared import _run_cloudflared + cloudflare = _run_cloudflared(5000) + elif(args.ngrok): from flask_ngrok import _run_ngrok cloudflare = _run_ngrok() elif(args.remote): from flask_cloudflared import _run_cloudflared cloudflare = _run_cloudflared(5000) - if(args.ngrok or args.remote): + if(args.localtunnel or args.ngrok or args.remote): with open('cloudflare.log', 'w') as cloudflarelog: cloudflarelog.write("KoboldAI has finished loading and is available at the following link : " + cloudflare) print(format(colors.GREEN) + "KoboldAI has finished loading and is available at the following link : " + cloudflare + format(colors.END)) diff --git a/colabkobold.sh b/colabkobold.sh index cf835f38..3adfbc9d 100644 --- a/colabkobold.sh +++ b/colabkobold.sh @@ -2,7 +2,7 @@ # KoboldAI Easy Colab Deployment Script by Henk717 # read the options -TEMP=`getopt -o m:i:p:c:d:x:a:l:z:g:t:n:b:s: --long model:,init:,path:,configname:,download:,aria2:,dloc:xloc:7z:git:tar:ngrok:branch:savemodel: -- "$@"` +TEMP=`getopt -o m:i:p:c:d:x:a:l:z:g:t:n:b:s:o: --long model:,init:,path:,configname:,download:,aria2:,dloc:xloc:7z:git:tar:ngrok:branch:savemodel:localtunnel: -- "$@"` eval set -- "$TEMP" # extract options and their arguments into variables. 
@@ -17,7 +17,9 @@ while true ; do -c|--configname) configname=" --configname $2" ; shift 2 ;; -n|--ngrok) - configname=" --ngrok" ; shift 2 ;; + ngrok=" --ngrok" ; shift 2 ;; + -o|--localtunnel) + localtunnel=" --localtunnel" ; shift 2 ;; -d|--download) download="$2" ; shift 2 ;; -a|--aria2) @@ -51,7 +53,7 @@ function launch else cd /content/KoboldAI-Client echo "Launching KoboldAI with the following options : python3 aiserver.py$model$kmpath$configname$ngrok$savemodel --colab" - python3 aiserver.py$model$kmpath$configname$ngrok$savemodel --colab + python3 aiserver.py$model$kmpath$configname$ngrok$localtunnel$savemodel --colab exit fi } @@ -159,8 +161,9 @@ if [ "$init" != "skip" ]; then pip install -r requirements.txt fi - # Make sure Colab has netbase + # Make sure Colab has the system dependencies sudo apt install netbase -y + npm install -g localtunnel fi cd /content diff --git a/commandline-rocm.sh b/commandline-rocm.sh old mode 100644 new mode 100755 diff --git a/commandline.sh b/commandline.sh old mode 100644 new mode 100755 diff --git a/play-rocm.sh b/play-rocm.sh index abff2106..8e3666a0 100755 --- a/play-rocm.sh +++ b/play-rocm.sh @@ -1,3 +1,3 @@ wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y -bin/micromamba run -r runtime -n koboldai-rocm python aiserver.py +bin/micromamba run -r runtime -n koboldai-rocm python aiserver.py $* diff --git a/play.sh b/play.sh index 25f433d1..061cdc5e 100755 --- a/play.sh +++ b/play.sh @@ -1,3 +1,3 @@ wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y -bin/micromamba run -r runtime -n koboldai python aiserver.py +bin/micromamba run -r runtime -n koboldai python aiserver.py $*