Colab file fix

Ash
2023-02-02 02:02:15 +04:00
parent d37d624b76
commit b535250428


@@ -1,161 +1,191 @@
#@title <b><-- Run</b>
#@markdown Models
Model = "Pygmalion 6B" #@param ["Nerys V2 6B", "Erebus 6B", "Skein 6B", "Janeway 6B", "Adventure 6B", "Pygmalion 6B", "Pygmalion 6B Dev", "Lit V2 6B", "Lit 6B", "Shinen 6B", "Nerys 2.7B", "AID 2.7B", "Erebus 2.7B", "Janeway 2.7B", "Picard 2.7B", "Horni LN 2.7B", "Horni 2.7B", "Shinen 2.7B", "OPT 2.7B", "Fairseq Dense 2.7B", "Neo 2.7B"] {allow-input: true}
Version = "Official" #@param ["Official", "United"] {allow-input: true}
Provider = "Cloudflare" #@param ["Localtunnel", "Cloudflare"]
use_google_drive = False #@param {type:"boolean"}
!nvidia-smi
import subprocess
import time
import sys
import os
from google.colab import drive
if use_google_drive:
    drive.mount('/content/drive/')
else:
    if not os.path.exists("/content/drive"):
        os.mkdir("/content/drive")
    if not os.path.exists("/content/drive/MyDrive/"):
        os.mkdir("/content/drive/MyDrive/")
Revision = ""
if Model == "Nerys V2 6B":
    Model = "KoboldAI/OPT-6B-nerys-v2"
    path = ""
    download = ""
elif Model == "Erebus 6B":
    Model = "KoboldAI/OPT-6.7B-Erebus"
    path = ""
    download = ""
elif Model == "Skein 6B":
    Model = "KoboldAI/GPT-J-6B-Skein"
    path = ""
    download = ""
elif Model == "Janeway 6B":
    Model = "KoboldAI/GPT-J-6B-Janeway"
    path = ""
    download = ""
elif Model == "Adventure 6B":
    Model = "KoboldAI/GPT-J-6B-Adventure"
    path = ""
    download = ""
elif Model == "Pygmalion 6B":
    Model = "PygmalionAI/pygmalion-6b"
    path = ""
    download = ""
    Version = "United"
elif Model == "Pygmalion 6B Dev":
    Model = "PygmalionAI/pygmalion-6b"
    Revision = "--revision dev"
    path = ""
    Version = "United"
    download = ""
elif Model == "Lit V2 6B":
    Model = "hakurei/litv2-6B-rev3"
    path = ""
    download = ""
elif Model == "Lit 6B":
    Model = "hakurei/lit-6B"
    path = ""
    download = ""
elif Model == "Shinen 6B":
    Model = "KoboldAI/GPT-J-6B-Shinen"
    path = ""
    download = ""
elif Model == "Nerys 2.7B":
    Model = "KoboldAI/fairseq-dense-2.7B-Nerys"
    path = ""
    download = ""
elif Model == "Erebus 2.7B":
    Model = "KoboldAI/OPT-2.7B-Erebus"
    path = ""
    download = ""
elif Model == "Janeway 2.7B":
    Model = "KoboldAI/GPT-Neo-2.7B-Janeway"
    path = ""
    download = ""
elif Model == "Picard 2.7B":
    Model = "KoboldAI/GPT-Neo-2.7B-Picard"
    path = ""
    download = ""
elif Model == "AID 2.7B":
    Model = "KoboldAI/GPT-Neo-2.7B-AID"
    path = ""
    download = ""
elif Model == "Horni LN 2.7B":
    Model = "KoboldAI/GPT-Neo-2.7B-Horni-LN"
    path = ""
    download = ""
elif Model == "Horni 2.7B":
    Model = "KoboldAI/GPT-Neo-2.7B-Horni"
    path = ""
    download = ""
elif Model == "Shinen 2.7B":
    Model = "KoboldAI/GPT-Neo-2.7B-Shinen"
    path = ""
    download = ""
elif Model == "Fairseq Dense 2.7B":
    Model = "KoboldAI/fairseq-dense-2.7B"
    path = ""
    download = ""
elif Model == "OPT 2.7B":
    Model = "facebook/opt-2.7b"
    path = ""
    download = ""
elif Model == "Neo 2.7B":
    Model = "EleutherAI/gpt-neo-2.7B"
    path = ""
    download = ""
if Provider == "Localtunnel":
    tunnel = "--localtunnel yes"
else:
    tunnel = ""
#Henk's KoboldAI script
!wget https://koboldai.org/ckds && chmod +x ckds
!./ckds --init only
p = subprocess.Popen(['/content/ckds', '--model', Model], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#Tricks
url = ''
while True:
    line = p.stdout.readline().decode().strip()
    if "KoboldAI has finished loading and is available at the following link: " in line:
        print(line)
        url = line.split("KoboldAI has finished loading and is available at the following link: ")[1]
        print(url)
        break
    if "KoboldAI has finished loading and is available at the following link for UI 1: " in line:
        print(line)
        url = line.split("KoboldAI has finished loading and is available at the following link for UI 1: ")[1]
        print(url)
        break
    if not line:
        break
    print(line)
    if "INIT" in line and "Transformers" in line:
        print("Model loading... (It will take 2 - 5 minutes)")
#TavernAI
%cd /
!curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash
!nvm install 19.1.0
!nvm use 19.1.0
!node -v
!git clone https://github.com/TavernAI/TavernAI.git
%cd TavernAI
!npm install
time.sleep(1)
%env colab=2
%env colaburl=$url
!nohup node server.js &
time.sleep(3)
print('###KoboldAI###')
print(url)
print('')
print('###TavernAI LINK###')
!lt --port 8000
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU",
"gpuClass": "standard"
},
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "hps3qtPLFNBb"
},
"outputs": [],
"source": [
"#@title <b><-- Select your model below and then click this to start KoboldAI</b>\n",
"#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
"\n",
"Model = \"Pygmalion 6B\" #@param [\"Nerys V2 6B\", \"Erebus 6B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Pygmalion 6B\", \"Pygmalion 6B Dev\", \"Lit V2 6B\", \"Lit 6B\", \"Shinen 6B\", \"Nerys 2.7B\", \"AID 2.7B\", \"Erebus 2.7B\", \"Janeway 2.7B\", \"Picard 2.7B\", \"Horni LN 2.7B\", \"Horni 2.7B\", \"Shinen 2.7B\", \"OPT 2.7B\", \"Fairseq Dense 2.7B\", \"Neo 2.7B\"] {allow-input: true}\n",
"Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
"Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
"use_google_drive = False #@param {type:\"boolean\"}\n",
"\n",
"!nvidia-smi\n",
"import subprocess\n",
"import time\n",
"import sys\n",
"import os\n",
"from google.colab import drive\n",
"if use_google_drive:\n",
" drive.mount('/content/drive/')\n",
"else:\n",
" if not os.path.exists(\"/content/drive\"):\n",
" os.mkdir(\"/content/drive\")\n",
" if not os.path.exists(\"/content/drive/MyDrive/\"):\n",
" os.mkdir(\"/content/drive/MyDrive/\")\n",
"\n",
"Revision = \"\"\n",
"\n",
"if Model == \"Nerys V2 6B\":\n",
" Model = \"KoboldAI/OPT-6B-nerys-v2\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Erebus 6B\":\n",
" Model = \"KoboldAI/OPT-6.7B-Erebus\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Skein 6B\":\n",
" Model = \"KoboldAI/GPT-J-6B-Skein\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Janeway 6B\":\n",
" Model = \"KoboldAI/GPT-J-6B-Janeway\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Adventure 6B\":\n",
" Model = \"KoboldAI/GPT-J-6B-Adventure\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Pygmalion 6B\":\n",
" Model = \"PygmalionAI/pygmalion-6b\"\n",
" path = \"\"\n",
" download = \"\"\n",
" Version = \"United\"\n",
"elif Model == \"Pygmalion 6B Dev\":\n",
" Model = \"PygmalionAI/pygmalion-6b\"\n",
" Revision = \"--revision dev\"\n",
" path = \"\"\n",
" Version = \"United\"\n",
" download = \"\"\n",
"elif Model == \"Lit V2 6B\":\n",
" Model = \"hakurei/litv2-6B-rev3\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Lit 6B\":\n",
" Model = \"hakurei/lit-6B\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Shinen 6B\":\n",
" Model = \"KoboldAI/GPT-J-6B-Shinen\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Nerys 2.7B\":\n",
" Model = \"KoboldAI/fairseq-dense-2.7B-Nerys\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Erebus 2.7B\":\n",
" Model = \"KoboldAI/OPT-2.7B-Erebus\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Janeway 2.7B\":\n",
" Model = \"KoboldAI/GPT-Neo-2.7B-Janeway\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Picard 2.7B\":\n",
" Model = \"KoboldAI/GPT-Neo-2.7B-Picard\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"AID 2.7B\":\n",
" Model = \"KoboldAI/GPT-Neo-2.7B-AID\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Horni LN 2.7B\":\n",
" Model = \"KoboldAI/GPT-Neo-2.7B-Horni-LN\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Horni 2.7B\":\n",
" Model = \"KoboldAI/GPT-Neo-2.7B-Horni\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Shinen 2.7B\":\n",
" Model = \"KoboldAI/GPT-Neo-2.7B-Shinen\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Fairseq Dense 2.7B\":\n",
" Model = \"KoboldAI/fairseq-dense-2.7B\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"OPT 2.7B\":\n",
" Model = \"facebook/opt-2.7b\"\n",
" path = \"\"\n",
" download = \"\"\n",
"elif Model == \"Neo 2.7B\":\n",
" Model = \"EleutherAI/gpt-neo-2.7B\"\n",
" path = \"\"\n",
" download = \"\"\n",
"\n",
"if Provider == \"Localtunnel\":\n",
" tunnel = \"--localtunnel yes\"\n",
"else:\n",
" tunnel = \"\"\n",
"\n",
"#Henk's KoboldAI script\n",
"!wget https://koboldai.org/ckds && chmod +x ckds\n",
"!./ckds --init only\n",
"p = subprocess.Popen(['/content/ckds', '--model', Model], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n",
"\n",
"#Tricks\n",
"url = ''\n",
"while True:\n",
" line = p.stdout.readline().decode().strip()\n",
" if \"KoboldAI has finished loading and is available at the following link: \" in line:\n",
" print(line)\n",
" url = line.split(\"KoboldAI has finished loading and is available at the following link: \")[1]\n",
" print(url)\n",
" break\n",
" if \"KoboldAI has finished loading and is available at the following link for UI 1: \" in line:\n",
" print(line)\n",
" url = line.split(\"KoboldAI has finished loading and is available at the following link for UI 1: \")[1]\n",
" print(url)\n",
" break\n",
" if not line:\n",
" break\n",
" print(line)\n",
" if \"INIT\" in line and \"Transformers\" in line:\n",
" print(\"Model loading... (It will take 2 - 5 minutes)\")\n",
"\n",
"#TavernAI\n",
"%cd /\n",
"!curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash\n",
"!nvm install 19.1.0\n",
"!nvm use 19.1.0\n",
"!node -v\n",
"!git clone https://github.com/TavernAI/TavernAI.git\n",
"%cd TavernAI\n",
"!npm install\n",
"time.sleep(1)\n",
"%env colab=2\n",
"%env colaburl=$url\n",
"!nohup node server.js &\n",
"time.sleep(3)\n",
"print('###KoboldAI###')\n",
"print(url)\n",
"print('')\n",
"print('###TavernAI LINK###')\n",
"!lt --port 8000\n"
]
}
]
}
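
For context, the fix amounts to wrapping the unchanged cell source in standard .ipynb JSON (nbformat 4, a single code cell) so Colab can open the file again. A minimal sketch of that wrapping, assuming the old raw script is available locally as script.py and writing to TavernAI.ipynb (both hypothetical paths, not taken from this commit):

import json

# Hypothetical input: the raw Colab script that used to be committed as-is.
with open("script.py", encoding="utf-8") as f:
    source_lines = f.readlines()  # each entry keeps its trailing "\n", as in the "source" array above

notebook = {
    "nbformat": 4,
    "nbformat_minor": 0,
    "metadata": {
        "colab": {"provenance": []},
        "kernelspec": {"name": "python3", "display_name": "Python 3"},
        "language_info": {"name": "python"},
        "accelerator": "GPU",
        "gpuClass": "standard",
    },
    "cells": [
        {
            "cell_type": "code",
            "execution_count": None,
            "metadata": {},  # Colab assigns its own cell id (e.g. "id": "...") on save
            "outputs": [],
            "source": source_lines,
        }
    ],
}

# Hypothetical output path; json.dump guarantees the result is valid JSON.
with open("TavernAI.ipynb", "w", encoding="utf-8") as f:
    json.dump(notebook, f, indent=1)

Opening the result with nbformat (nbformat.read(path, as_version=4) followed by nbformat.validate) is an optional extra check; plain json.load already catches the failure mode this commit fixes, namely a raw script committed where notebook JSON was expected.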