Mirror of https://github.com/SillyTavern/SillyTavern.git
extras_server in separate file like models
colab/GPU.ipynb (140 changed lines)
@@ -113,32 +113,19 @@
 "!cat .ii\n",
 "!nvidia-smi\n",
 "\n",
-"import os, subprocess, time, pathlib, json, base64\n",
+"import os, subprocess, time, pathlib, json, base64, sys\n",
-"\n",
-"class ModelData:\n",
-" def __init__(self, name, version = Version, revision = \"\", path = \"\", download = \"\"):\n",
-" try:\n",
-" self.name = base64.b64decode(name.encode(\"ascii\")).decode(\"ascii\")\n",
-" except:\n",
-" self.name = name\n",
-" self.version = version \n",
-" self.revision = revision\n",
-" self.path = path\n",
-" self.download = download\n",
-" def args(self):\n",
-" args = [\"-m\", self.name, \"-g\", self.version]\n",
-" if(self.revision):\n",
-" args += [\"-r\", self.revision]\n",
-" return args\n",
 "\n",
+"# ---\n",
+"# Utils\n",
 "class IncrementialInstall:\n",
-" def __init__(self, tasks = [], force = []):\n",
+" def __init__(self, root = \"/\", tasks = [], force = []):\n",
 " self.tasks = tasks\n",
+" self.path = os.path.join(root, \".ii\")\n",
 " self.completed = list(filter(lambda x: not x in force, self.__completed()))\n",
 "\n",
 " def __completed(self):\n",
 " try:\n",
-" with open(\".ii\") as f:\n",
+" with open(self.path) as f:\n",
 " return json.load(f)\n",
 " except:\n",
 " return []\n",
@@ -153,18 +140,18 @@
 " task[\"func\"]()\n",
 " self.completed.append(task[\"name\"])\n",
 " finally:\n",
-" with open(\".ii\", \"w\") as f:\n",
+" with open(self.path, \"w\") as f:\n",
 " json.dump(self.completed, f)\n",
 "\n",
-"ii = IncrementialInstall(force=ForceInitSteps)\n",
-"\n",
 "def create_paths(paths):\n",
 " for directory in paths:\n",
 " if not os.path.exists(directory):\n",
 " os.makedirs(directory)\n",
 "\n",
-"# link source to dest copying dest to source if not present first\n",
 "def link(srcDir, destDir, files):\n",
+" '''\n",
+" Link source to dest copying dest to source if not present first\n",
+" '''\n",
 " for file in files:\n",
 " source = os.path.join(srcDir, file)\n",
 " dest = os.path.join(destDir, file)\n",
@@ -181,56 +168,23 @@
 " \"/content/drive/MyDrive\"\n",
 " ])\n",
 "\n",
-"models = {\n",
+"ii = IncrementialInstall(force=ForceInitSteps)\n",
-" \"Nerys V2 6B\": ModelData(\"KoboldAI/OPT-6B-nerys-v2\"),\n",
-" \"Erebus 6B\": ModelData(\"KoboldAI/OPT-6.7B-Erebus\"),\n",
-" \"Skein 6B\": ModelData(\"KoboldAI/GPT-J-6B-Skein\"),\n",
-" \"Janeway 6B\": ModelData(\"KoboldAI/GPT-J-6B-Janeway\"),\n",
-" \"Adventure 6B\": ModelData(\"KoboldAI/GPT-J-6B-Adventure\"),\n",
-" \"UHlnbWFsaW9uIDZC\": ModelData(\"UHlnbWFsaW9uQUkvcHlnbWFsaW9uLTZi\"),\n",
-" \"UHlnbWFsaW9uIDZCIERldg==\": ModelData(\"UHlnbWFsaW9uQUkvcHlnbWFsaW9uLTZi\", revision = \"dev\"),\n",
-" \"Lit V2 6B\": ModelData(\"hakurei/litv2-6B-rev3\"),\n",
-" \"Lit 6B\": ModelData(\"hakurei/lit-6B\"),\n",
-" \"Shinen 6B\": ModelData(\"KoboldAI/GPT-J-6B-Shinen\"),\n",
-" \"Nerys 2.7B\": ModelData(\"KoboldAI/fairseq-dense-2.7B-Nerys\"),\n",
-" \"Erebus 2.7B\": ModelData(\"KoboldAI/OPT-2.7B-Erebus\"),\n",
-" \"Janeway 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Janeway\"),\n",
-" \"Picard 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Picard\"),\n",
-" \"AID 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-AID\"),\n",
-" \"Horni LN 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Horni-LN\"),\n",
-" \"Horni 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Horni\"),\n",
-" \"Shinen 2.7B\": ModelData(\"KoboldAI/GPT-Neo-2.7B-Shinen\"),\n",
-" \"Fairseq Dense 2.7B\": ModelData(\"KoboldAI/fairseq-dense-2.7B\"),\n",
-" \"OPT 2.7B\": ModelData(\"facebook/opt-2.7b\"),\n",
-" \"Neo 2.7B\": ModelData(\"EleutherAI/gpt-neo-2.7B\"),\n",
-" \"Pygway 6B\": ModelData(\"TehVenom/PPO_Pygway-6b\"),\n",
-" \"Nerybus 6.7B\": ModelData(\"KoboldAI/OPT-6.7B-Nerybus-Mix\"),\n",
-" \"Pygway v8p4\": ModelData(\"TehVenom/PPO_Pygway-V8p4_Dev-6b\"),\n",
-" \"PPO-Janeway 6B\": ModelData(\"TehVenom/PPO_Janeway-6b\"),\n",
-" \"PPO Shygmalion 6B\": ModelData(\"TehVenom/PPO_Shygmalion-6b\"),\n",
-" \"LLaMA 7B\": ModelData(\"decapoda-research/llama-7b-hf\"),\n",
-" \"Janin-GPTJ\": ModelData(\"digitous/Janin-GPTJ\"),\n",
-" \"Javelin-GPTJ\": ModelData(\"digitous/Javelin-GPTJ\"),\n",
-" \"Javelin-R\": ModelData(\"digitous/Javelin-R\"),\n",
-" \"Janin-R\": ModelData(\"digitous/Janin-R\"),\n",
-" \"Javalion-R\": ModelData(\"digitous/Javalion-R\"),\n",
-" \"Javalion-GPTJ\": ModelData(\"digitous/Javalion-GPTJ\"),\n",
-" \"Javelion-6B\": ModelData(\"Cohee/Javelion-6b\"),\n",
-" \"GPT-J-Pyg-PPO-6B\": ModelData(\"TehVenom/GPT-J-Pyg_PPO-6B\"),\n",
-" \"ppo_hh_pythia-6B\": ModelData(\"reciprocate/ppo_hh_pythia-6B\"),\n",
-" \"ppo_hh_gpt-j\": ModelData(\"reciprocate/ppo_hh_gpt-j\"),\n",
-" \"Alpaca-7B\": ModelData(\"chainyo/alpaca-lora-7b\"),\n",
-" \"LLaMA 4-bit\": ModelData(\"decapoda-research/llama-13b-hf-int4\"),\n",
-" \"GPT-J-Pyg_PPO-6B\": ModelData(\"TehVenom/GPT-J-Pyg_PPO-6B\"),\n",
-" \"GPT-J-Pyg_PPO-6B-Dev-V8p4\": ModelData(\"TehVenom/GPT-J-Pyg_PPO-6B-Dev-V8p4\"),\n",
-" \"Dolly_GPT-J-6b\": ModelData(\"TehVenom/Dolly_GPT-J-6b\"),\n",
-" \"Dolly_Pyg-6B\": ModelData(\"TehVenom/AvgMerge_Dolly-Pygmalion-6b\")\n",
-"}\n",
-"model = models.get(Model, None)\n",
 "\n",
-"if model == None:\n",
+"# ---\n",
-" model = models.get(base64.b64encode(Model.encode(\"ascii\")).decode(\"ascii\"), ModelData(Model, Version))\n",
+"# SillyTavern py modules\n",
+"def cloneTavern():\n",
+" %cd /\n",
+" !git clone https://github.com/EnergoStalin/SillyTavern\n",
+" %cd -\n",
+" !cp /SillyTavern/colab/*.py ./\n",
+"ii.addTask(\"Clone SillyTavern\", cloneTavern)\n",
+"ii.run()\n",
 "\n",
+"from models import GetModels, ModelData\n",
+"model = GetModels(Version).get(Model, ModelData(Model, Version))\n",
+"\n",
+"# ---\n",
+"# KoboldAI\n",
 "if StartKoboldAI:\n",
 " def downloadKobold():\n",
 " !wget https://koboldai.org/ckds && chmod +x ckds\n",
@@ -275,7 +229,9 @@
 "\n",
 "print(url)\n",
 "\n",
-"# #TavernAI\n",
+"\n",
+"# ---\n",
+"# nodejs\n",
 "%cd /\n",
 "def setupNVM():\n",
 " !curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash\n",
@@ -287,6 +243,7 @@
 "ii.addTask(\"Install node\", installNode)\n",
 "\n",
 "\n",
+"# ---\n",
 "# TavernAI extras\n",
 "params = []\n",
 "params.append('--cpu')\n",
@@ -319,47 +276,10 @@
 " !pip install tensorflow==2.11\n",
 " ii.addTask('install requirements', installRequirements)\n",
 "\n",
-" def runServer():\n",
+" from extras_server import runServer, extractUrl\n",
-" cmd = f\"python server.py {' '.join(params)}\"\n",
-" print(cmd)\n",
-" extras_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd='/TavernAI-extras', shell=True)\n",
-" print('processId:', extras_process.pid)\n",
-" while True:\n",
-" line = extras_process.stdout.readline().decode().strip()\n",
-" if \"Running on \" in line:\n",
-" break\n",
-" if not line:\n",
-" print('breaking on line')\n",
-" break\n",
-" print(line)\n",
 " ii.addTask('run server', runServer)\n",
-"\n",
-" def extractUrl():\n",
-" subprocess.call('nohup lt --port 5100 > ./extras.out 2> ./extras.err &', shell=True)\n",
-" print('Waiting for lt init...')\n",
-" time.sleep(5)\n",
-"\n",
-" while True:\n",
-" if (os.path.getsize('./extras.out') > 0):\n",
-" with open('./extras.out', 'r') as f:\n",
-" lines = f.readlines()\n",
-" for x in range(len(lines)):\n",
-" if ('your url is: ' in lines[x]):\n",
-" print('TavernAI Extensions URL:')\n",
-" extras_url = lines[x].split('your url is: ')[1]\n",
-" print(extras_url)\n",
-" break\n",
-" if (os.path.getsize('./extras.err') > 0):\n",
-" with open('./extras.err', 'r') as f:\n",
-" print(f.readlines())\n",
-" break\n",
 " ii.addTask('extract extras URL', extractUrl)\n",
 "\n",
-"def cloneTavern():\n",
-" !git clone https://github.com/Cohee1207/SillyTavern\n",
-"ii.addTask(\"Clone SillyTavern\", cloneTavern)\n",
-"\n",
-"ii.run()\n",
 "%cd /SillyTavern\n",
 "\n",
 "if UseGoogleDrive:\n",
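For context, the notebook changes above lean on the IncrementialInstall helper, which records the names of completed setup tasks in a .ii JSON file under the chosen root (now configurable via the new root argument) so that re-running the cell skips steps that already succeeded. Below is a minimal sketch of that pattern; the addTask body and the skip check in run are inferred from the call sites in the diff, not shown verbatim in this commit:

import os, json

class IncrementialInstall:
    def __init__(self, root = "/", tasks = [], force = []):
        self.tasks = tasks
        self.path = os.path.join(root, ".ii")   # registry of finished task names
        self.completed = list(filter(lambda x: not x in force, self.__completed()))

    def __completed(self):
        try:
            with open(self.path) as f:
                return json.load(f)
        except:
            return []

    def addTask(self, name, func):               # inferred shape: the name doubles as the skip key
        self.tasks.append({"name": name, "func": func})

    def run(self):
        for task in self.tasks:
            if task["name"] in self.completed:   # already done on a previous run
                continue
            try:
                task["func"]()
                self.completed.append(task["name"])
            finally:
                with open(self.path, "w") as f:
                    json.dump(self.completed, f)   # persist progress even if a step fails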
colab/extras_server.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+import os, time, subprocess
+
+def runServer():
+    cmd = f"python server.py {' '.join(params)}"
+    print(cmd)
+    extras_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd='/TavernAI-extras', shell=True)
+    print('processId:', extras_process.pid)
+    while True:
+        line = extras_process.stdout.readline().decode().strip()
+        if "Running on " in line:
+            break
+        if not line:
+            print('breaking on line')
+            break
+        print(line)
+
+def extractUrl():
+    subprocess.call('nohup lt --port 5100 > ./extras.out 2> ./extras.err &', shell=True)
+    print('Waiting for lt init...')
+
+    while True:
+        if (os.path.getsize('./extras.out') > 0):
+            with open('./extras.out', 'r') as f:
+                lines = f.readlines()
+                for x in range(len(lines)):
+                    if ('your url is: ' in lines[x]):
+                        print('TavernAI Extensions URL:')
+                        extras_url = lines[x].split('your url is: ')[1]
+                        print(extras_url)
+            break
+        if (os.path.getsize('./extras.err') > 0):
+            with open('./extras.err', 'r') as f:
+                print(f.readlines())
+            break
+        time.sleep(1)
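Note that runServer reads a params list that extras_server.py itself does not define; the notebook is expected to supply it before the task runs. A minimal sketch of one way the notebook cell could wire this up (the extras_server.params assignment is an illustrative assumption, not a line from this commit):

import extras_server
from extras_server import runServer, extractUrl

extras_server.params = params   # expose the notebook's flag list to the module's globals (assumption)
ii.addTask('run server', runServer)
ii.addTask('extract extras URL', extractUrl)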
colab/models.py (new file, 77 lines)
@@ -0,0 +1,77 @@
+class ModelData:
+    def __init__(self, name, version = "", revision="", path="", download=""):
+        self.name = name
+        self.version = version
+        self.revision = revision
+        self.path = path
+        self.download = download
+
+    def __str__(self):
+        return self.args().__str__()
+
+    def args(self):
+        args = ["-m", self.name]
+        if (self.version):
+            args += ["-g", self.version]
+        if (self.revision):
+            args += ["-r", self.revision]
+        return args
+
+
+class ModelFactory:
+    def __init__(self, **kwargs):
+        self.kwargs = kwargs
+
+    def NewModelData(self, name, **kwargs):
+        cpy = self.kwargs.copy()
+        cpy.update(kwargs)
+        return ModelData(name = name, **cpy)
+
+
+def GetModels(Version):
+    mf = ModelFactory(version=Version)
+    return {
+        "Nerys V2 6B": mf.NewModelData("KoboldAI/OPT-6B-nerys-v2"),
+        "Erebus 6B": mf.NewModelData("KoboldAI/OPT-6.7B-Erebus"),
+        "Skein 6B": mf.NewModelData("KoboldAI/GPT-J-6B-Skein"),
+        "Janeway 6B": mf.NewModelData("KoboldAI/GPT-J-6B-Janeway"),
+        "Adventure 6B": mf.NewModelData("KoboldAI/GPT-J-6B-Adventure"),
+        "Pygmalion 6B": mf.NewModelData("PygmalionAI/pygmalion-6b"),
+        "Pygmalion 6B Dev": mf.NewModelData("PygmalionAI/pygmalion-6b", revision="dev"),
+        "Lit V2 6B": mf.NewModelData("hakurei/litv2-6B-rev3"),
+        "Lit 6B": mf.NewModelData("hakurei/lit-6B"),
+        "Shinen 6B": mf.NewModelData("KoboldAI/GPT-J-6B-Shinen"),
+        "Nerys 2.7B": mf.NewModelData("KoboldAI/fairseq-dense-2.7B-Nerys"),
+        "Erebus 2.7B": mf.NewModelData("KoboldAI/OPT-2.7B-Erebus"),
+        "Janeway 2.7B": mf.NewModelData("KoboldAI/GPT-Neo-2.7B-Janeway"),
+        "Picard 2.7B": mf.NewModelData("KoboldAI/GPT-Neo-2.7B-Picard"),
+        "AID 2.7B": mf.NewModelData("KoboldAI/GPT-Neo-2.7B-AID"),
+        "Horni LN 2.7B": mf.NewModelData("KoboldAI/GPT-Neo-2.7B-Horni-LN"),
+        "Horni 2.7B": mf.NewModelData("KoboldAI/GPT-Neo-2.7B-Horni"),
+        "Shinen 2.7B": mf.NewModelData("KoboldAI/GPT-Neo-2.7B-Shinen"),
+        "Fairseq Dense 2.7B": mf.NewModelData("KoboldAI/fairseq-dense-2.7B"),
+        "OPT 2.7B": mf.NewModelData("facebook/opt-2.7b"),
+        "Neo 2.7B": mf.NewModelData("EleutherAI/gpt-neo-2.7B"),
+        "Pygway 6B": mf.NewModelData("TehVenom/PPO_Pygway-6b"),
+        "Nerybus 6.7B": mf.NewModelData("KoboldAI/OPT-6.7B-Nerybus-Mix"),
+        "Pygway v8p4": mf.NewModelData("TehVenom/PPO_Pygway-V8p4_Dev-6b"),
+        "PPO-Janeway 6B": mf.NewModelData("TehVenom/PPO_Janeway-6b"),
+        "PPO Shygmalion 6B": mf.NewModelData("TehVenom/PPO_Shygmalion-6b"),
+        "LLaMA 7B": mf.NewModelData("decapoda-research/llama-7b-hf"),
+        "Janin-GPTJ": mf.NewModelData("digitous/Janin-GPTJ"),
+        "Javelin-GPTJ": mf.NewModelData("digitous/Javelin-GPTJ"),
+        "Javelin-R": mf.NewModelData("digitous/Javelin-R"),
+        "Janin-R": mf.NewModelData("digitous/Janin-R"),
+        "Javalion-R": mf.NewModelData("digitous/Javalion-R"),
+        "Javalion-GPTJ": mf.NewModelData("digitous/Javalion-GPTJ"),
+        "Javelion-6B": mf.NewModelData("Cohee/Javelion-6b"),
+        "GPT-J-Pyg-PPO-6B": mf.NewModelData("TehVenom/GPT-J-Pyg_PPO-6B"),
+        "ppo_hh_pythia-6B": mf.NewModelData("reciprocate/ppo_hh_pythia-6B"),
+        "ppo_hh_gpt-j": mf.NewModelData("reciprocate/ppo_hh_gpt-j"),
+        "Alpaca-7B": mf.NewModelData("chainyo/alpaca-lora-7b"),
+        "LLaMA 4-bit": mf.NewModelData("decapoda-research/llama-13b-hf-int4"),
+        "GPT-J-Pyg_PPO-6B": mf.NewModelData("TehVenom/GPT-J-Pyg_PPO-6B"),
+        "GPT-J-Pyg_PPO-6B-Dev-V8p4": mf.NewModelData("TehVenom/GPT-J-Pyg_PPO-6B-Dev-V8p4"),
+        "Dolly_GPT-J-6b": mf.NewModelData("TehVenom/Dolly_GPT-J-6b"),
+        "Dolly_Pyg-6B": mf.NewModelData("TehVenom/AvgMerge_Dolly-Pygmalion-6b")
+    }
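To illustrate how the notebook now turns its form fields into KoboldAI launcher flags, here is a short usage sketch of the new module; the "United" version string is only an assumed example value for the notebook's Version field, not something defined in this file:

from models import GetModels

model = GetModels("United").get("Pygmalion 6B Dev")   # "United" stands in for the notebook's Version field
print(model.args())                                   # ['-m', 'PygmalionAI/pygmalion-6b', '-g', 'United', '-r', 'dev']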