diff --git a/aiserver.py b/aiserver.py
index 38b50846..bc5cc3ba 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -738,7 +738,7 @@ parser.add_argument("--nobreakmodel", action='store_true', help="Disables Breakm
 parser.add_argument("--unblock", action='store_true', default=False, help="Unblocks the KoboldAI port to be accessible from other machines without optimizing for remote play (It is recommended to use --host instead)")
 parser.add_argument("--quiet", action='store_true', default=False, help="If present will suppress any story related text from showing on the console")
 parser.add_argument("--lowmem", action='store_true', help="Extra Low Memory loading for the GPU, slower but memory does not peak to twice the usage")
-
+parser.add_argument("--savemodel", action='store_true', help="Saves the model to the models folder even if --colab is used (Allows you to save models to Google Drive)")
 args: argparse.Namespace = None
 if(os.environ.get("KOBOLDAI_ARGS") is not None):
     import shlex
@@ -1420,7 +1420,7 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go
             except Exception as e:
                 model = GPTNeoForCausalLM.from_pretrained(vars.model, cache_dir="cache", **lowmem)
 
-            if not args.colab:
+            if not args.colab or args.savemodel:
                 import shutil
                 model = model.half()
                 model.save_pretrained("models/{}".format(vars.model.replace('/', '_')))
diff --git a/colabkobold.sh b/colabkobold.sh
index a7744c1c..cf835f38 100644
--- a/colabkobold.sh
+++ b/colabkobold.sh
@@ -2,7 +2,7 @@
 # KoboldAI Easy Colab Deployment Script by Henk717
 
 # read the options
-TEMP=`getopt -o m:i:p:c:d:x:a:l:z:g:t:n:b: --long model:,init:,path:,configname:,download:,aria2:,dloc:xloc:7z:git:tar:ngrok:branch: -- "$@"`
+TEMP=`getopt -o m:i:p:c:d:x:a:l:z:g:t:n:b:s: --long model:,init:,path:,configname:,download:,aria2:,dloc:xloc:7z:git:tar:ngrok:branch:savemodel: -- "$@"`
 eval set -- "$TEMP"
 
 # extract options and their arguments into variables.
@@ -34,6 +34,8 @@ while true ; do
             git="$2" ; shift 2 ;;
         -b|--branch)
             branch="$2" ; shift 2 ;;
+        -s|--savemodel)
+            savemodel=" --savemodel" ; shift 2 ;;
         --) shift ; break ;;
         *) echo "Internal error!" ; exit 1 ;;
     esac
@@ -48,8 +50,8 @@ function launch
         exit 0
     else
         cd /content/KoboldAI-Client
-        echo "Launching KoboldAI with the following options : python3 aiserver.py$model$kmpath$configname$ngrok --colab"
-        python3 aiserver.py$model$kmpath$configname$ngrok --colab
+        echo "Launching KoboldAI with the following options : python3 aiserver.py$model$kmpath$configname$ngrok$savemodel --colab"
+        python3 aiserver.py$model$kmpath$configname$ngrok$savemodel --colab
         exit
     fi
 }
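
Usage note (not part of the patch): because getopt registers "savemodel:" with a trailing colon, the --savemodel option expects a value on the command line even though the case handler discards it with "shift 2". A minimal invocation sketch, assuming an illustrative model name and placeholder value:

    # Hypothetical call; model name and the "true" placeholder are examples only.
    bash colabkobold.sh -m KoboldAI/GPT-Neo-2.7B-Horni --savemodel true

With the flag set, colabkobold.sh appends " --savemodel" to the aiserver.py launch line, and the changed condition "if not args.colab or args.savemodel:" then saves the half-precision model under models/ even when --colab is active.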