From 487739911ab11049214feb40ef50c672eefc6c85 Mon Sep 17 00:00:00 2001
From: henk717
Date: Wed, 8 Mar 2023 18:44:03 +0100
Subject: [PATCH 1/4] Restore Pygmalion 6B Dev

---
 colab/TPU.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/colab/TPU.ipynb b/colab/TPU.ipynb
index 3976f6dd..96207af4 100644
--- a/colab/TPU.ipynb
+++ b/colab/TPU.ipynb
@@ -66,7 +66,7 @@
 "#@title <-- Select your model below and then click this to start KoboldAI\n",
 "#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
 "\n",
-"Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Nerybus 13B\", \"Erebus 13B\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 20B\", \"Erebus 20B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Pygmalion 6B\", \"Lit V2 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
+"Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Nerybus 13B\", \"Erebus 13B\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 20B\", \"Erebus 20B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Pygmalion 6B\", \"Pygmalion 6B Dev\", \"Lit V2 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
 "Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
 "Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
 "use_google_drive = True #@param {type:\"boolean\"}\n",
@@ -177,7 +177,7 @@
 "else:\n",
 "  tunnel = \"\"\n",
 "\n",
-"!wget https://koboldai.org/ckds -O - | bash /dev/stdin $path$download -m $Model -g $Version $tunnel"
+"!wget https://koboldai.org/ckds -O - | bash /dev/stdin $path$download -m $Model -g $Version $tunnel $Revision"
 ]
 },
 {

From ffa5c0bc13ae98f10d521da3cdec665337a87bf1 Mon Sep 17 00:00:00 2001
From: henk717
Date: Wed, 8 Mar 2023 20:52:03 +0100
Subject: [PATCH 2/4] Empty Revision Fix

---
 colab/TPU.ipynb | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/colab/TPU.ipynb b/colab/TPU.ipynb
index 96207af4..131b134a 100644
--- a/colab/TPU.ipynb
+++ b/colab/TPU.ipynb
@@ -89,6 +89,8 @@
 "  if not os.path.exists(\"/content/drive/MyDrive/\"):\n",
 "    os.mkdir(\"/content/drive/MyDrive/\")\n",
 "\n",
+"Revision = \"\"\n",
+"\n",
 "if Model == \"Janeway 13B\":\n",
 "  Model = \"KoboldAI/fairseq-dense-13B-Janeway\"\n",
 "  path = \"\"\n",

From 219b824b9b239c1b14b1df553622a465d63f433a Mon Sep 17 00:00:00 2001
From: Henk
Date: Fri, 17 Mar 2023 01:28:59 +0100
Subject: [PATCH 3/4] SocketIO Requirements Pin

---
 environments/huggingface.yml | 7 ++++---
 environments/rocm.yml        | 7 ++++---
 requirements.txt             | 9 +++++----
 requirements_mtj.txt         | 9 +++++----
 4 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/environments/huggingface.yml b/environments/huggingface.yml
index 5942d79a..1bef463d 100644
--- a/environments/huggingface.yml
+++ b/environments/huggingface.yml
@@ -5,12 +5,13 @@ channels:
   - defaults
 dependencies:
   - colorama
-  - flask-socketio
-  - flask-session
+  - flask-socketio=5.3.2
+  - flask-session=0.4.0
+  - python-socketio=5.7.2
   - pytorch=1.11.*
   - python=3.8.*
   - cudatoolkit=11.1
-  - eventlet
+  - eventlet=0.33.3
   - dnspython=2.2.1
   - markdown
   - bleach=4.1.0
diff --git a/environments/rocm.yml b/environments/rocm.yml
index c9c6b487..47d29e07 100644
--- a/environments/rocm.yml
+++ b/environments/rocm.yml
@@ -4,10 +4,11 @@ channels:
   - defaults
 dependencies:
   - colorama
-  - flask-socketio
-  - flask-session
+  - flask-socketio=5.3.2
+  - flask-session=0.4.0
+  - python-socketio=5.7.2
   - python=3.8.*
-  - eventlet
+  - eventlet=0.33.3
   - dnspython=2.2.1
   - markdown
   - bleach=4.1.0
diff --git a/requirements.txt b/requirements.txt
index b79ca5cb..b13230ed 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,12 +1,13 @@
 transformers==4.24.0
 huggingface_hub==0.12.1
-Flask
-Flask-SocketIO
+Flask==2.2.3
+Flask-SocketIO==5.3.2
+python-socketio==5.7.2
 requests
 torch >= 1.9, < 1.13
 flask-cloudflared==0.0.10
 flask-ngrok
-eventlet
+eventlet==0.33.3
 dnspython==2.2.1
 lupa==1.10
 markdown
@@ -14,7 +15,7 @@ bleach==4.1.0
 sentencepiece
 protobuf
 accelerate
-flask-session
+flask-session==0.4.0
 marshmallow>=3.13
 apispec-webframeworks
 loguru
diff --git a/requirements_mtj.txt b/requirements_mtj.txt
index c99852e7..e66e1ba7 100644
--- a/requirements_mtj.txt
+++ b/requirements_mtj.txt
@@ -9,16 +9,17 @@ transformers == 4.24.0
 huggingface_hub==0.12.1
 progressbar2
 git+https://github.com/VE-FORBRYDERNE/mesh-transformer-jax@ck
-flask
-Flask-SocketIO
+Flask==2.2.3
+Flask-SocketIO==5.3.2
+python-socketio==5.7.2
 flask-cloudflared==0.0.10
 flask-ngrok
-eventlet
+eventlet==0.33.3
 dnspython==2.2.1
 lupa==1.10
 markdown
 bleach==4.1.0
-flask-session
+flask-session==0.4.0
 marshmallow>=3.13
 apispec-webframeworks
 loguru

From 90a7eb615351afd7901939c616b5165576198889 Mon Sep 17 00:00:00 2001
From: Henk
Date: Fri, 17 Mar 2023 12:40:08 +0100
Subject: [PATCH 4/4] LLama tokenizer settings

---
 aiserver.py                  |  7 +++++-
 install_git_transformers.bat | 41 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 47 insertions(+), 1 deletion(-)
 create mode 100644 install_git_transformers.bat

diff --git a/aiserver.py b/aiserver.py
index 412fce91..3368447b 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -3217,7 +3217,12 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
             #koboldai_vars.badwords = gettokenids("[")
             #for key in koboldai_vars.badwords:
             #    koboldai_vars.badwordsids.append([vocab[key]])
-
+
+            # These are model specific overrides if a model has bad defaults
+            if koboldai_vars.model_type == "llama":
+                tokenizer.decode_with_prefix_space = True
+                tokenizer.add_bos_token = False
+
             logger.info(f"Pipeline created: {koboldai_vars.model}")

         else:
diff --git a/install_git_transformers.bat b/install_git_transformers.bat
new file mode 100644
index 00000000..3e154b80
--- /dev/null
+++ b/install_git_transformers.bat
@@ -0,0 +1,41 @@
+@echo off
+cd /D %~dp0
+SET CONDA_SHLVL=
+
+TITLE KoboldAI - Git Transformers Installer
+ECHO This script will replace the Transformers version with the latest Git Transformers which may contain breaking changes.
+ECHO If you wish to return to the approved version of transformers you can run the install_requirements.bat script or KoboldAI Updater.
+pause
+
+SET /P M=<loader.settings
+IF %M%==1 GOTO drivemap
+IF %M%==2 GOTO subfolder
+IF %M%==3 GOTO drivemap_B
+
+:subfolder
+ECHO Runtime launching in subfolder mode
+SET TEMP=%~DP0MINICONDA3
+SET TMP=%~DP0MINICONDA3
+call miniconda3\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
+
+:drivemap
+ECHO Runtime launching in K: drive mode
+subst /D K: >nul
+subst K: miniconda3 >nul
+SET TEMP=K:\
+SET TMP=K:\
+call K:\python\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
+
+:drivemap_B
+ECHO Runtime launching in B: drive mode
+subst /D B: >nul
+subst B: miniconda3 >nul
+SET TEMP=B:\
+SET TMP=B:\
+call B:\python\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
\ No newline at end of file