SillyTavern/colab/GPU.ipynb

{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"**Links**<br>\n",
"Cohee's TavernAI fork Github https://github.com/Cohee1207/SillyTavern<br>\n",
"Cohee's TavernAI Extras Github https://github.com/Cohee1207/TavernAI-extras/<br>\n",
"TavernAI Discord https://discord.gg/zmK2gmr45t<br>\n",
"Questions? Hit me up on Discord: Cohee#1207"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "_1gpebrnlp5-"
},
"outputs": [],
"source": [
"#@title <b><-- Convert TavernAI characters to SillyTavern format</b>\n",
"\n",
"!mkdir /convert\n",
"%cd /convert\n",
"\n",
"import os\n",
"from google.colab import drive\n",
"\n",
"drive.mount(\"/convert/drive\")\n",
"\n",
"!git clone -b tools https://github.com/EnergoStalin/SillyTavern.git\n",
"%cd SillyTavern\n",
"\n",
"!curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash\n",
"!nvm install 19.1.0\n",
"!nvm use 19.1.0\n",
"\n",
"%cd tools/charaverter\n",
"\n",
"!npm i\n",
"\n",
"path = \"/convert/drive/MyDrive/TavernAI/characters\"\n",
"output = \"/convert/drive/MyDrive/SillyTavern/characters\"\n",
"if not os.path.exists(path):\n",
" path = output\n",
"\n",
"!mkdir -p $output\n",
"!node main.mjs $path $output\n",
"\n",
"drive.flush_and_unmount()\n",
"\n",
"%cd /\n",
"!rm -rf /convert"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "ewkXkyiFP2Hq"
},
"outputs": [],
"source": [
"#@title <-- Tap this if you play on Mobile { display-mode: \"form\" }\n",
"%%html\n",
"<b>Press play on the music player to keep the tab alive, then start KoboldAI below (Uses only 13MB of data)</b><br/>\n",
"<audio src=\"https://raw.githubusercontent.com/KoboldAI/KoboldAI-Client/main/colab/silence.m4a\" controls>"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "lVftocpwCoYw"
},
"outputs": [],
"source": [
"#@title <b><-- Select your model below and then click this to start KoboldAI</b>\n",
"\n",
"Model = \"Pygmalion 6B\" #@param [\"Nerys V2 6B\", \"Erebus 6B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Pygmalion 6B\", \"Pygmalion 6B Dev\", \"Lit V2 6B\", \"Lit 6B\", \"Shinen 6B\", \"Nerys 2.7B\", \"AID 2.7B\", \"Erebus 2.7B\", \"Janeway 2.7B\", \"Picard 2.7B\", \"Horni LN 2.7B\", \"Horni 2.7B\", \"Shinen 2.7B\", \"OPT 2.7B\", \"Fairseq Dense 2.7B\", \"Neo 2.7B\", \"Pygway 6B\", \"Nerybus 6.7B\", \"Pygway v8p4\", \"PPO-Janeway 6B\", \"PPO Shygmalion 6B\", \"LLaMA 7B\", \"Janin-GPTJ\", \"Javelin-GPTJ\", \"Javelin-R\", \"Janin-R\", \"Javalion-R\", \"Javalion-GPTJ\", \"Javelion-6B\", \"GPT-J-Pyg-PPO-6B\", \"ppo_hh_pythia-6B\", \"ppo_hh_gpt-j\", \"GPT-J-Pyg_PPO-6B\", \"GPT-J-Pyg_PPO-6B-Dev-V8p4\", \"Dolly_GPT-J-6b\", \"Dolly_Pyg-6B\"] {allow-input: true}\n",
"Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
"Provider = \"Localtunnel\" #@param [\"Localtunnel\"]\n",
"ForceInitSteps = [] #@param {allow-input: true}\n",
"UseGoogleDrive = True #@param {type:\"boolean\"}\n",
"StartKoboldAI = True #@param {type:\"boolean\"}\n",
"ModelsFromDrive = False #@param {type:\"boolean\"}\n",
"UseExtrasExtensions = True #@param {type:\"boolean\"}\n",
"#@markdown Enables hosting of extensions backend for TavernAI Extras\n",
"extras_enable_captioning = True #@param {type:\"boolean\"}\n",
"#@markdown Loads the image captioning module\n",
"Captions_Model = \"Salesforce/blip-image-captioning-large\" #@param [ \"Salesforce/blip-image-captioning-large\", \"Salesforce/blip-image-captioning-base\" ]\n",
"#@markdown * Salesforce/blip-image-captioning-large - good base model\n",
"#@markdown * Salesforce/blip-image-captioning-base - slightly faster but less accurate\n",
"extras_enable_emotions = True #@param {type:\"boolean\"}\n",
"#@markdown Loads the sentiment classification model\n",
"Emotions_Model = \"bhadresh-savani/distilbert-base-uncased-emotion\" #@param [\"bhadresh-savani/distilbert-base-uncased-emotion\", \"joeddav/distilbert-base-uncased-go-emotions-student\"]\n",
"#@markdown * bhadresh-savani/distilbert-base-uncased-emotion = 6 supported emotions<br>\n",
"#@markdown * joeddav/distilbert-base-uncased-go-emotions-student = 28 supported emotions\n",
"extras_enable_memory = True #@param {type:\"boolean\"}\n",
"#@markdown Loads the story summarization module\n",
"Memory_Model = \"Qiliang/bart-large-cnn-samsum-ChatGPT_v3\" #@param [ \"Qiliang/bart-large-cnn-samsum-ChatGPT_v3\", \"Qiliang/bart-large-cnn-samsum-ElectrifAi_v10\", \"distilbart-xsum-12-3\" ]\n",
"#@markdown * Qiliang/bart-large-cnn-samsum-ChatGPT_v3 - summarization model optimized for chats\n",
"#@markdown * Qiliang/bart-large-cnn-samsum-ElectrifAi_v10 - nice results so far, but still being evaluated\n",
"#@markdown * distilbart-xsum-12-3 - faster, but pretty basic alternative\n",
"\n",
"\n",
"%cd /content\n",
"\n",
"!cat .ii\n",
"!nvidia-smi\n",
"\n",
"import os, subprocess, time, pathlib, json, base64\n",
"\n",
"class ModelData:\n",
" def __init__(self, name, version = Version, revision = \"\", path = \"\", download = \"\"):\n",
" try:\n",
" self.name = base64.b64decode(name.encode(\"ascii\")).decode(\"ascii\")\n",
" except:\n",
" self.name = name\n",
" self.version = version \n",
" self.revision = revision\n",
" self.path = path\n",
" self.download = download\n",
" def args(self):\n",
" args = [\"-m\", self.name, \"-g\", self.version]\n",
" if(self.revision):\n",
" args += [\"-r\", self.revision]\n",
" return args\n",
"\n",
"class IncrementialInstall:\n",
" def __init__(self, tasks = [], force = []):\n",
" self.tasks = tasks\n",
" self.completed = list(filter(lambda x: not x in force, self.__completed()))\n",
"\n",
" def __completed(self):\n",
" try:\n",
" with open(\".ii\") as f:\n",
" return json.load(f)\n",
" except:\n",
" return []\n",
"\n",
" def addTask(self, name, func):\n",
" self.tasks.append({\"name\": name, \"func\": func})\n",
"\n",
" def run(self):\n",
" todo = list(filter(lambda x: not x[\"name\"] in self.completed, self.tasks))\n",
" try:\n",
" for task in todo:\n",
" task[\"func\"]()\n",
" self.completed.append(task[\"name\"])\n",
" finally:\n",
" with open(\".ii\", \"w\") as f:\n",
" json.dump(self.completed, f)\n",
"\n",
"ii = IncrementialInstall(force=ForceInitSteps)\n",
"\n",
"def create_paths(paths):\n",
" for directory in paths:\n",
" if not os.path.exists(directory):\n",
" os.makedirs(directory)\n",
"\n",
"# link source to dest copying dest to source if not present first\n",
"def link(srcDir, destDir, files):\n",
" for file in files:\n",
" source = os.path.join(srcDir, file)\n",
" dest = os.path.join(destDir, file)\n",
" if not os.path.exists(source):\n",
" !cp -r \"$dest\" \"$source\"\n",
" !rm -rf \"$dest\"\n",
" !ln -fs \"$source\" \"$dest\"\n",
"\n",
"from google.colab import drive\n",
"if UseGoogleDrive:\n",
" drive.mount(\"/content/drive/\")\n",
"else:\n",
" create_paths([\n",
" \"/content/drive/MyDrive\"\n",
" ])\n",
"\n",
"models = {\n",
" \"Nerys V2 6B\": ModelData(\"KoboldAI/OPT-6B-nerys-v2\"),\n",
" \"Erebus 6B\": ModelData(\"KoboldAI/OPT-6.7B-Erebus\"),\n",
" \"Skein 6B\": ModelData(\"KoboldAI/GPT-J-6B-Skein\"),\n",
" \"Janeway 6B\": ModelData(\"KoboldAI/GPT-J-6B-Janeway\"),\n",
" \"Adventure 6B\": ModelData(\"KoboldAI/GPT-J-6B-Adventure\"),\n",
" \"UHlnbWFsaW9uIDZC\": ModelData(\"UHlnbWFsaW9uQUkvcHlnbWFsaW9uLTZi\"),\n",
" \"UHlnbWFsaW9uIDZCIERldg==\": ModelData(\"UHlnbWFsaW9uQUkvcHlnbWFsaW9uLTZi\", revision = \"dev\"),\n",
" \"Lit V2 6B\": ModelData(\"hakurei/litv2-6B-rev3\"),\n",
" \"Lit 6B\": ModelData(\"hakurei/lit-6B\"),\n",
" \"Shinen 6B\": ModelData(\"KoboldAI/GPT-J-6B-Shinen\"),\n",
" \"Nerys 2.7B\": ModelData(\"KoboldAI/fairseq-dense-2.7B-Nerys\"),\n",
" \"Erebus 2.7B\": ModelData(\"KoboldAI/OPT-2.7B-Erebus\"),\n",
" \"Janeway 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Janeway\"),\n",
" \"Picard 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Picard\"),\n",
" \"AID 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-AID\"),\n",
" \"Horni LN 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Horni-LN\"),\n",
" \"Horni 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Horni\"),\n",
" \"Shinen 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Shinen\"),\n",
" \"Fairseq Dense 2.7B\": ModelData(\"KoboldAI/fairseq-dense-2.7B\"),\n",
" \"OPT 2.7B\": ModelData(\"facebook/opt-2.7b\"),\n",
" \"Neo 2.7B\": ModelData(\"EleutherAI/gpt-neo-2.7B\"),\n",
" \"Pygway 6B\": ModelData(\"TehVenom/PPO_Pygway-6b\"),\n",
" \"Nerybus 6.7B\": ModelData(\"KoboldAI/OPT-6.7B-Nerybus-Mix\"),\n",
" \"Pygway v8p4\": ModelData(\"TehVenom/PPO_Pygway-V8p4_Dev-6b\"),\n",
" \"PPO-Janeway 6B\": ModelData(\"TehVenom/PPO_Janeway-6b\"),\n",
" \"PPO Shygmalion 6B\": ModelData(\"TehVenom/PPO_Shygmalion-6b\"),\n",
" \"LLaMA 7B\": ModelData(\"decapoda-research/llama-7b-hf\"),\n",
" \"Janin-GPTJ\": ModelData(\"digitous/Janin-GPTJ\"),\n",
" \"Javelin-GPTJ\": ModelData(\"digitous/Javelin-GPTJ\"),\n",
" \"Javelin-R\": ModelData(\"digitous/Javelin-R\"),\n",
" \"Janin-R\": ModelData(\"digitous/Janin-R\"),\n",
" \"Javalion-R\": ModelData(\"digitous/Javalion-R\"),\n",
" \"Javalion-GPTJ\": ModelData(\"digitous/Javalion-GPTJ\"),\n",
" \"Javelion-6B\": ModelData(\"Cohee/Javelion-6b\"),\n",
" \"GPT-J-Pyg-PPO-6B\": ModelData(\"TehVenom/GPT-J-Pyg_PPO-6B\"),\n",
" \"ppo_hh_pythia-6B\": ModelData(\"reciprocate/ppo_hh_pythia-6B\"),\n",
" \"ppo_hh_gpt-j\": ModelData(\"reciprocate/ppo_hh_gpt-j\"),\n",
" \"Alpaca-7B\": ModelData(\"chainyo/alpaca-lora-7b\"),\n",
" \"LLaMA 4-bit\": ModelData(\"decapoda-research/llama-13b-hf-int4\"),\n",
" \"GPT-J-Pyg_PPO-6B\": ModelData(\"TehVenom/GPT-J-Pyg_PPO-6B\"),\n",
" \"GPT-J-Pyg_PPO-6B-Dev-V8p4\": ModelData(\"TehVenom/GPT-J-Pyg_PPO-6B-Dev-V8p4\"),\n",
" \"Dolly_GPT-J-6b\": ModelData(\"TehVenom/Dolly_GPT-J-6b\"),\n",
" \"Dolly_Pyg-6B\": ModelData(\"TehVenom/AvgMerge_Dolly-Pygmalion-6b\")\n",
"}\n",
"model = models.get(Model, None)\n",
"\n",
"if model == None:\n",
" model = models.get(base64.b64encode(Model.encode(\"ascii\")).decode(\"ascii\"), ModelData(Model, Version))\n",
"\n",
"if StartKoboldAI:\n",
" def downloadKobold():\n",
" !wget https://koboldai.org/ckds && chmod +x ckds\n",
" def initKobold():\n",
" !./ckds --init only\n",
"\n",
" ii.addTask(\"Download KoboldAI\", downloadKobold)\n",
" ii.addTask(\"Init KoboldAI\", initKobold)\n",
" \n",
" ii.run()\n",
"\n",
"kargs = [\"/content/ckds\"]\n",
"if not ModelsFromDrive:\n",
" kargs += [\"-x\", \"colab\", \"-l\", \"colab\"]\n",
"if Provider == \"Localtunnel\":\n",
" kargs += [\"--localtunnel\", \"yes\"]\n",
"\n",
"kargs += model.args()\n",
"\n",
"url = \"\"\n",
"print(kargs)\n",
"\n",
"if StartKoboldAI:\n",
" p = subprocess.Popen(kargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n",
"\n",
" prefix = \"KoboldAI has finished loading and is available at the following link\"\n",
" urlprefix = f\"{prefix}: \"\n",
" ui1prefix = f\"{prefix} for UI 1: \"\n",
" while True:\n",
" line = p.stdout.readline().decode().strip()\n",
" if urlprefix in line:\n",
" url = line.split(urlprefix)[1]\n",
" break\n",
" elif ui1prefix in line:\n",
" url = line.split(ui1prefix)[1]\n",
" break\n",
" elif not line:\n",
" break\n",
" print(line)\n",
" if \"INIT\" in line and \"Transformers\" in line:\n",
" print(\"Model loading... (It will take 2 - 5 minutes)\")\n",
"\n",
"print(url)\n",
"\n",
"# #TavernAI\n",
"%cd /\n",
"def setupNVM():\n",
" !curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash\n",
"ii.addTask(\"Setup NVM\", setupNVM)\n",
"\n",
"def installNode():\n",
" !nvm install 19.1.0\n",
" !nvm use 19.1.0\n",
"ii.addTask(\"Install node\", installNode)\n",
"\n",
"\n",
"# TavernAI extras\n",
"params = []\n",
"params.append('--cpu')\n",
"ExtrasModules = []\n",
"\n",
"if (extras_enable_captioning):\n",
" ExtrasModules.append('caption')\n",
"if (extras_enable_memory):\n",
" ExtrasModules.append('summarize')\n",
"if (extras_enable_emotions):\n",
" ExtrasModules.append('classify')\n",
"\n",
"params.append(f'--classification-model={Emotions_Model}')\n",
"params.append(f'--summarization-model={Memory_Model}')\n",
"params.append(f'--captioning-model={Captions_Model}')\n",
"params.append(f'--enable-modules={\",\".join(ExtrasModules)}')\n",
"\n",
"extras_url = '(disabled)'\n",
"\n",
"if UseExtrasExtensions:\n",
" def cloneExtras():\n",
" %cd /\n",
" !git clone https://github.com/Cohee1207/TavernAI-extras\n",
" ii.addTask('clone extras', cloneExtras)\n",
"\n",
" def installRequirements():\n",
" %cd /TavernAI-extras\n",
" !npm install -g localtunnel\n",
" !pip install -r requirements.txt\n",
" !pip install tensorflow==2.11\n",
" ii.addTask('install requirements', installRequirements)\n",
"\n",
" def runServer():\n",
" cmd = f\"python server.py {' '.join(params)}\"\n",
" print(cmd)\n",
" extras_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd='/TavernAI-extras', shell=True)\n",
" print('processId:', extras_process.pid)\n",
" while True:\n",
" line = extras_process.stdout.readline().decode().strip()\n",
" if \"Running on \" in line:\n",
" break\n",
" if not line:\n",
" print('breaking on line')\n",
" break\n",
" print(line)\n",
" ii.addTask('run server', runServer)\n",
"\n",
" def extractUrl():\n",
" subprocess.call('nohup lt --port 5100 > ./extras.out 2> ./extras.err &', shell=True)\n",
" print('Waiting for lt init...')\n",
" time.sleep(5)\n",
"\n",
" while True:\n",
" if (os.path.getsize('./extras.out') > 0):\n",
" with open('./extras.out', 'r') as f:\n",
" lines = f.readlines()\n",
" for x in range(len(lines)):\n",
" if ('your url is: ' in lines[x]):\n",
" print('TavernAI Extensions URL:')\n",
" extras_url = lines[x].split('your url is: ')[1]\n",
" print(extras_url)\n",
" break\n",
" if (os.path.getsize('./extras.err') > 0):\n",
" with open('./extras.err', 'r') as f:\n",
" print(f.readlines())\n",
" break\n",
" ii.addTask('extract extras URL', extractUrl)\n",
"\n",
"def cloneTavern():\n",
" !git clone https://github.com/Cohee1207/SillyTavern\n",
"ii.addTask(\"Clone SillyTavern\", cloneTavern)\n",
"\n",
"ii.run()\n",
"%cd /SillyTavern\n",
"\n",
"if UseGoogleDrive:\n",
" %env googledrive=2\n",
"\n",
" def setupTavernPaths():\n",
" tdrive = \"/content/drive/MyDrive/SillyTavern\"\n",
" create_paths([\n",
" tdrive,\n",
" os.path.join(\"public\", \"groups\"),\n",
" os.path.join(\"public\", \"group chats\")\n",
" ])\n",
" link(tdrive, \"public\", [\n",
" \"settings.json\",\n",
" \"backgrounds\",\n",
" \"characters\",\n",
" \"chats\",\n",
" \"User Avatars\",\n",
" \"css\",\n",
" \"worlds\",\n",
" \"group chats\",\n",
" \"groups\",\n",
" ])\n",
" ii.addTask(\"Setup Tavern Paths\", setupTavernPaths)\n",
"\n",
"def installTavernDependencies():\n",
" !npm install\n",
" !npm install -g localtunnel\n",
"ii.addTask(\"Install Tavern Dependencies\", installTavernDependencies)\n",
"ii.run()\n",
"\n",
"%env colaburl=$url\n",
"%env SILLY_TAVERN_PORT=5001\n",
"print(\"KoboldAI LINK:\", url, '###Extensions API LINK###', extras_url, \"###SillyTavern LINK###\", sep=\"\\n\")\n",
"p = subprocess.Popen([\"lt\", \"--port\", \"5001\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n",
"print(p.stdout.readline().decode().strip())\n",
"!node server.js"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"private_outputs": true,
"provenance": []
},
"gpuClass": "standard",
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}