diff --git a/aiserver.py b/aiserver.py
index 0d3ea356..87a4eca3 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -756,6 +756,12 @@ def getmodelname():
         modelname = vars.model
     return modelname
 
+#==================================================================#
+# Get hidden size from model
+#==================================================================#
+def get_hidden_size_from_model(model):
+    return model.get_input_embeddings().embedding_dim
+
 #==================================================================#
 # Breakmodel configuration functions
 #==================================================================#
@@ -2415,9 +2421,6 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
 
         return lazy_load_callback
 
-    def get_hidden_size_from_model(model):
-        return model.get_input_embeddings().embedding_dim
-
     def maybe_low_cpu_mem_usage() -> Dict[str, Any]:
         if(packaging.version.parse(transformers_version) < packaging.version.parse("4.11.0")):
             logger.warning(f"Please upgrade to transformers 4.11.0 for lower RAM usage. You have transformers {transformers_version}.")
@@ -2905,7 +2908,7 @@ def lua_startup():
     except lupa.LuaError as e:
         print(colors.RED + "ERROR!" + colors.END)
         vars.lua_koboldbridge.obliterate_multiverse()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         exit(1)
     logger.init_ok("LUA bridge", status="OK")
@@ -3463,7 +3466,7 @@ def execute_inmod():
         vars.lua_running = False
         emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
         sendUSStatItems()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         set_aibusy(0)
 
@@ -3480,7 +3483,7 @@ def execute_outmod():
         vars.lua_running = False
         emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
         sendUSStatItems()
-        logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+        logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
         logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         set_aibusy(0)
     if(vars.lua_koboldbridge.resend_settings_required):
@@ -4900,7 +4903,7 @@ def generate(txt, minimum, maximum, found_entries=None):
             vars.lua_running = False
             emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
             sendUSStatItems()
-            logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+            logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
             logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         else:
             emit('from_server', {'cmd': 'errmsg', 'data': 'Error occurred during generator call; please check console.'}, broadcast=True)
@@ -5408,7 +5411,7 @@ def tpumtjgenerate(txt, minimum, maximum, found_entries=None):
             vars.lua_running = False
             emit('from_server', {'cmd': 'errmsg', 'data': 'Lua script error; please check console.'}, broadcast=True)
             sendUSStatItems()
-            logger.debug('LUA ERROR: ' + str(e).replace("\033", ""))
+            logger.error('LUA ERROR: ' + str(e).replace("\033", ""))
             logger.warning("Lua engine stopped; please open 'Userscripts' and press Load to reinitialize scripts.")
         else:
             emit('from_server', {'cmd': 'errmsg', 'data': 'Error occurred during generator call; please check console.'}, broadcast=True)
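
Usage note (not part of the patch): a minimal sketch of how the now module-level get_hidden_size_from_model helper could be exercised, assuming a Hugging Face transformers causal LM; the "gpt2" checkpoint and this snippet are illustrative only and do not appear in the diff.

    from transformers import AutoModelForCausalLM

    # The helper reads the width of the input embedding matrix,
    # which equals the model's hidden size.
    def get_hidden_size_from_model(model):
        return model.get_input_embeddings().embedding_dim

    model = AutoModelForCausalLM.from_pretrained("gpt2")  # hypothetical example checkpoint
    print(get_hidden_size_from_model(model))              # prints 768 for GPT-2 small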