diff --git a/aiserver.py b/aiserver.py
index 412fce91..3368447b 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -3217,7 +3217,12 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
             #koboldai_vars.badwords = gettokenids("[")
             #for key in koboldai_vars.badwords:
             #    koboldai_vars.badwordsids.append([vocab[key]])
-
+
+            # These are model specific overrides if a model has bad defaults
+            if koboldai_vars.model_type == "llama":
+                tokenizer.decode_with_prefix_space = True
+                tokenizer.add_bos_token = False
+
             logger.info(f"Pipeline created: {koboldai_vars.model}")
 
         else:
diff --git a/install_git_transformers.bat b/install_git_transformers.bat
new file mode 100644
index 00000000..3e154b80
--- /dev/null
+++ b/install_git_transformers.bat
@@ -0,0 +1,41 @@
+@echo off
+cd /D %~dp0
+SET CONDA_SHLVL=
+
+TITLE KoboldAI - Git Transformers Installer
+ECHO This script will replace the Transformers version with the latest Git Transformers which may contain breaking changes.
+ECHO If you wish to return to the approved version of transformers you can run the install_requirements.bat script or KoboldAI Updater.
+pause
+
+SET /P M=<loader.settings
+IF %M%==1 GOTO drivemap
+IF %M%==2 GOTO subfolder
+IF %M%==3 GOTO drivemap_B
+
+:subfolder
+ECHO Runtime launching in subfolder mode
+SET TEMP=%~DP0MINICONDA3
+SET TMP=%~DP0MINICONDA3
+call miniconda3\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
+
+:drivemap
+ECHO Runtime launching in K: drive mode
+subst /D K: >nul
+subst K: miniconda3 >nul
+SET TEMP=K:\
+SET TMP=K:\
+call K:\python\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
+
+:drivemap_B
+ECHO Runtime launching in B: drive mode
+subst /D B: >nul
+subst B: miniconda3 >nul
+SET TEMP=B:\
+SET TMP=B:\
+call B:\python\condabin\activate
+pip install git+https://github.com/huggingface/transformers
+cmd /k
\ No newline at end of file
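
For reference, the llama-specific tokenizer override from the aiserver.py hunk can be exercised on its own. The sketch below is illustrative only, not part of the patch: it assumes a git install of transformers with LLaMA support, and "path/to/llama" is a placeholder checkpoint path rather than anything shipped with KoboldAI.

# Minimal sketch of the override applied in load_model() above.
from transformers import AutoTokenizer

# Placeholder path; substitute a local LLaMA checkpoint converted for transformers.
tokenizer = AutoTokenizer.from_pretrained("path/to/llama")

# Same overrides the patch applies when koboldai_vars.model_type == "llama":
# do not prepend the BOS token automatically, and keep the prefix space on decode.
tokenizer.add_bos_token = False
tokenizer.decode_with_prefix_space = True

ids = tokenizer("Hello world").input_ids
print(ids)                    # with add_bos_token=False there is no leading <s> id
print(tokenizer.decode(ids))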