diff --git a/aiserver.py b/aiserver.py
index 4558ce3d..f58d949a 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -3248,7 +3248,12 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
#koboldai_vars.badwords = gettokenids("[")
#for key in koboldai_vars.badwords:
# koboldai_vars.badwordsids.append([vocab[key]])
-
+
+ # These are model specific overrides if a model has bad defaults
+ if koboldai_vars.model_type == "llama":
+ tokenizer.decode_with_prefix_space = True
+ tokenizer.add_bos_token = False
+
logger.info(f"Pipeline created: {koboldai_vars.model}")
else:
diff --git a/colab/TPU.ipynb b/colab/TPU.ipynb
index 3976f6dd..131b134a 100644
--- a/colab/TPU.ipynb
+++ b/colab/TPU.ipynb
@@ -66,7 +66,7 @@
"#@title <-- Select your model below and then click this to start KoboldAI\n",
"#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
"\n",
- "Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Nerybus 13B\", \"Erebus 13B\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 20B\", \"Erebus 20B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Pygmalion 6B\", \"Lit V2 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
+ "Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Nerybus 13B\", \"Erebus 13B\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 20B\", \"Erebus 20B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Pygmalion 6B\", \"Pygmalion 6B Dev\", \"Lit V2 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
"Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
"Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
"use_google_drive = True #@param {type:\"boolean\"}\n",
@@ -89,6 +89,8 @@
" if not os.path.exists(\"/content/drive/MyDrive/\"):\n",
" os.mkdir(\"/content/drive/MyDrive/\")\n",
"\n",
+ "Revision = \"\"\n",
+ "\n",
"if Model == \"Janeway 13B\":\n",
" Model = \"KoboldAI/fairseq-dense-13B-Janeway\"\n",
" path = \"\"\n",
@@ -177,7 +179,7 @@
"else:\n",
" tunnel = \"\"\n",
"\n",
- "!wget https://koboldai.org/ckds -O - | bash /dev/stdin $path$download -m $Model -g $Version $tunnel"
+ "!wget https://koboldai.org/ckds -O - | bash /dev/stdin $path$download -m $Model -g $Version $tunnel $Revision"
]
},
{
diff --git a/environments/huggingface.yml b/environments/huggingface.yml
index 26e7e670..42dda9c3 100644
--- a/environments/huggingface.yml
+++ b/environments/huggingface.yml
@@ -5,12 +5,13 @@ channels:
- defaults
dependencies:
- colorama
- - flask-socketio
- - flask-session
+ - flask-socketio=5.3.2
+ - flask-session=0.4.0
+ - python-socketio=5.7.2
- pytorch=1.11.*
- python=3.8.*
- cudatoolkit=11.1
- - eventlet
+ - eventlet=0.33.3
- dnspython=2.2.1
- markdown
- bleach=4.1.0
diff --git a/environments/rocm.yml b/environments/rocm.yml
index 13eae816..43fd331f 100644
--- a/environments/rocm.yml
+++ b/environments/rocm.yml
@@ -4,10 +4,11 @@ channels:
- defaults
dependencies:
- colorama
- - flask-socketio
- - flask-session
+ - flask-socketio=5.3.2
+ - flask-session=0.4.0
+ - python-socketio=5.7.2
- python=3.8.*
- - eventlet
+ - eventlet=0.33.3
- dnspython=2.2.1
- markdown
- bleach=4.1.0
diff --git a/install_git_transformers.bat b/install_git_transformers.bat
new file mode 100644
index 00000000..3e154b80
--- /dev/null
+++ b/install_git_transformers.bat
@@ -0,0 +1,41 @@
+@echo off
+cd /D %~dp0
+SET CONDA_SHLVL=
+
+TITLE KoboldAI - Git Transformers Installer
+ECHO This script will replace the Transformers version with the latest Git Transformers which may contain breaking changes.
+ECHO If you wish to return to the approved version of transformers you can run the install_requirements.bat script or KoboldAI Updater.
+pause
+
+SET /P M=<loader.settings
+IF %M%==1 GOTO drivemap
+IF %M%==2 GOTO subfolder
+IF %M%==3 GOTO drivemap_B
+
+:subfolder
+ECHO Runtime launching in subfolder mode
+SET TEMP=%~DP0MINICONDA3
+SET TMP=%~DP0MINICONDA3
+call miniconda3\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
+
+:drivemap
+ECHO Runtime launching in K: drive mode
+subst /D K: >nul
+subst K: miniconda3 >nul
+SET TEMP=K:\
+SET TMP=K:\
+call K:\python\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
+
+:drivemap_B
+ECHO Runtime launching in B: drive mode
+subst /D B: >nul
+subst B: miniconda3 >nul
+SET TEMP=B:\
+SET TMP=B:\
+call B:\python\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index e7f79dbd..d85c61fd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,14 @@
transformers==4.25.1
huggingface_hub==0.12.1
-Flask
-Flask-SocketIO
+Flask==2.2.3
+Flask-SocketIO==5.3.2
+python-socketio==5.7.2
requests
torch >= 1.9, < 1.13
flask-cloudflared==0.0.10
flask-ngrok
flask-cors
-eventlet
+eventlet==0.33.3
dnspython==2.2.1
lupa==1.10
markdown
@@ -15,7 +16,7 @@ bleach==4.1.0
sentencepiece
protobuf
accelerate
-flask_session
+flask-session==0.4.0
marshmallow>=3.13
apispec-webframeworks
loguru
diff --git a/requirements_mtj.txt b/requirements_mtj.txt
index e0f89ece..759e3b1c 100644
--- a/requirements_mtj.txt
+++ b/requirements_mtj.txt
@@ -9,17 +9,18 @@ transformers == 4.25.1
huggingface_hub==0.12.1
progressbar2
git+https://github.com/VE-FORBRYDERNE/mesh-transformer-jax@ck
-flask
-Flask-SocketIO
+Flask==2.2.3
+Flask-SocketIO==5.3.2
+python-socketio==5.7.2
flask-cloudflared==0.0.10
flask-ngrok
flask-cors
-eventlet
+eventlet==0.33.3
dnspython==2.2.1
lupa==1.10
markdown
bleach==4.1.0
-flask-session
+flask-session==0.4.0
marshmallow>=3.13
apispec-webframeworks
loguru