diff --git a/colab/GPU.ipynb b/colab/GPU.ipynb
index 527ac3aa..ca71f3a2 100644
--- a/colab/GPU.ipynb
+++ b/colab/GPU.ipynb
@@ -5,7 +5,8 @@
     "colab": {
       "name": "ColabKobold GPU",
       "private_outputs": true,
-      "provenance": []
+      "provenance": [],
+      "include_colab_link": true
     },
     "kernelspec": {
       "display_name": "Python 3",
@@ -17,6 +18,16 @@
     "accelerator": "GPU"
   },
   "cells": [
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "view-in-github",
+        "colab_type": "text"
+      },
+      "source": [
+        "\"Open"
+      ]
+    },
     {
       "cell_type": "markdown",
       "metadata": {
@@ -71,7 +82,7 @@
        "#@title <-- Select your model below and then click this to start KoboldAI\n",
        "#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
        "\n",
-        "Model = \"Nerys V2 6B\" #@param [\"Nerys V2 6B\", \"Erebus 6B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Pygmalion 6B\", \"Lit V2 6B\", \"Lit 6B\", \"Shinen 6B\", \"Nerys 2.7B\", \"AID 2.7B\", \"Erebus 2.7B\", \"Janeway 2.7B\", \"Picard 2.7B\", \"Horni LN 2.7B\", \"Horni 2.7B\", \"Shinen 2.7B\", \"OPT 2.7B\", \"Fairseq Dense 2.7B\", \"Neo 2.7B\"] {allow-input: true}\n",
+        "Model = \"Nerys V2 6B\" #@param [\"Nerys V2 6B\", \"Erebus 6B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Pygmalion 6B\", \"Pygmalion 6B Dev\", \"Lit V2 6B\", \"Lit 6B\", \"Shinen 6B\", \"Nerys 2.7B\", \"AID 2.7B\", \"Erebus 2.7B\", \"Janeway 2.7B\", \"Picard 2.7B\", \"Horni LN 2.7B\", \"Horni 2.7B\", \"Shinen 2.7B\", \"OPT 2.7B\", \"Fairseq Dense 2.7B\", \"Neo 2.7B\"] {allow-input: true}\n",
        "Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
        "Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
        "use_google_drive = True #@param {type:\"boolean\"}\n",
@@ -87,6 +98,8 @@
        "  if not os.path.exists(\"/content/drive/MyDrive/\"):\n",
        "    os.mkdir(\"/content/drive/MyDrive/\")\n",
        "\n",
+        "Revision = \"\"\n",
+        "\n",
        "if Model == \"Nerys V2 6B\":\n",
        "  Model = \"KoboldAI/OPT-6B-nerys-v2\"\n",
        "  path = \"\"\n",
@@ -111,6 +124,11 @@
        "  Model = \"PygmalionAI/pygmalion-6b\"\n",
        "  path = \"\"\n",
        "  download = \"\"\n",
+        "elif Model == \"Pygmalion 6B Dev\":\n",
+        "  Model = \"PygmalionAI/pygmalion-6b\"\n",
+        "  Revision = \"dev\"\n",
+        "  path = \"\"\n",
+        "  download = \"\"\n",
        "elif Model == \"Lit V2 6B\":\n",
        "  Model = \"hakurei/litv2-6B-rev3\"\n",
        "  path = \"\"\n",
@@ -173,7 +191,7 @@
        "else:\n",
        "  tunnel = \"\"\n",
        "\n",
-        "!wget https://koboldai.org/ckds -O - | bash /dev/stdin -m $Model -g $Version $tunnel"
+        "!wget https://koboldai.org/ckds -O - | bash /dev/stdin -m $Model -g $Version $tunnel $Revision"
      ],
      "execution_count": null,
      "outputs": []