From b209cf98683484958735c1ab32ac467f97eb97a1 Mon Sep 17 00:00:00 2001 From: henk717 Date: Sat, 18 Jun 2022 19:46:16 +0200 Subject: [PATCH] NS mode as default Experimental change that makes NS the default, as more and more models seem to be requiring this now that megatron based models are getting traction; neither does this seem to break the original models (with the exception of a user not being able to use \n in generated outputs — in the extremely rare case someone would be affected by this, they can manually switch the mode by editing their settings file). If this breaks nothing, ns will remain the default; however, the n mode should remain a choice for those who need it. In case it does get reversed, I have also added the bloom model type to the ns list since its models require this. --- aiserver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aiserver.py b/aiserver.py index 81af6f31..c43f9f0f 100644 --- a/aiserver.py +++ b/aiserver.py @@ -339,7 +339,7 @@ class vars: rngpersist = False nogenmod = False welcome = False # Custom Welcome Text (False is default) - newlinemode = "n" + newlinemode = "ns" quiet = False # If set will suppress any story text from being printed to the console (will only be seen on the client web page) debug = False # If set to true, will send debug information to the client for display lazy_load = True # Whether or not to use torch_lazy_loader.py for transformers models in order to reduce CPU memory usage @@ -661,7 +661,7 @@ def loadmodelsettings(): js = {} if vars.model_type == "xglm" or js.get("compat", "j") == "fairseq_lm": vars.newlinemode = "s" # Default to newline mode if using XGLM - if vars.model_type == "opt": + if vars.model_type == "opt" or vars.model_type == "bloom": vars.newlinemode = "ns" # Handle but don't convert newlines if using Fairseq models that have newlines trained in them vars.modelconfig = js if("badwordsids" in js):