From c3ccdb0b50276da9ee38bfdca125ae22580ebc62 Mon Sep 17 00:00:00 2001
From: ebolam
Date: Fri, 28 Oct 2022 14:19:50 -0400
Subject: [PATCH] Potential Colab URL fix

---
 aiserver.py          |  4 +---
 koboldai_settings.py | 10 ++++++++--
 tpu_mtj_backend.py   |  6 +++---
 3 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/aiserver.py b/aiserver.py
index 33589d84..af1d1f3c 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -91,7 +91,6 @@ if lupa.LUA_VERSION[:2] != (5, 4):
     logger.error(f"Please install lupa==1.10. You have lupa {lupa.__version__}.")
 
 patch_causallm_patched = False
-cloudflare = ""
 
 # Make sure tqdm progress bars display properly in Colab
 from tqdm.auto import tqdm
@@ -3094,8 +3093,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
 
                 hf_checkpoint=koboldai_vars.model not in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and koboldai_vars.use_colab_tpu,
                 socketio_queue=koboldai_settings.queue,
-                initial_load=initial_load, logger=logger, cloudflare=cloudflare,
-                **koboldai_vars.modelconfig)
+                initial_load=initial_load, logger=logger, **koboldai_vars.modelconfig)
             #tpool.execute(tpu_mtj_backend.load_model, koboldai_vars.custmodpth, hf_checkpoint=koboldai_vars.model not in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX") and koboldai_vars.use_colab_tpu, **koboldai_vars.modelconfig)
         koboldai_vars.modeldim = int(tpu_mtj_backend.params.get("d_embed", tpu_mtj_backend.params["d_model"]))
         tokenizer = tpu_mtj_backend.tokenizer
diff --git a/koboldai_settings.py b/koboldai_settings.py
index 354aaad0..a8388d69 100644
--- a/koboldai_settings.py
+++ b/koboldai_settings.py
@@ -987,8 +987,14 @@ class user_settings(settings):
         process_variable_changes(self.socketio, self.__class__.__name__.replace("_settings", ""), name, value, old_value)
 
 class system_settings(settings):
-    local_only_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai', 'comregex_ui', 'sp', '_horde_pid', 'inference_config', 'image_pipeline', 'summarizer', 'summary_tokenizer']
-    no_save_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold', 'lua_koboldcore', 'sp', 'sp_length', '_horde_pid', 'horde_share', 'aibusy', 'serverstarted', 'inference_config', 'image_pipeline', 'summarizer', 'summary_tokenizer', 'use_colab_tpu', 'noai', 'disable_set_aibusy']
+    local_only_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold',
+                            'lua_koboldcore', 'regex_sl', 'acregex_ai', 'acregex_ui', 'comregex_ai',
+                            'comregex_ui', 'sp', '_horde_pid', 'inference_config', 'image_pipeline',
+                            'summarizer', 'summary_tokenizer']
+    no_save_variables = ['socketio', 'lua_state', 'lua_logname', 'lua_koboldbridge', 'lua_kobold',
+                         'lua_koboldcore', 'sp', 'sp_length', '_horde_pid', 'horde_share', 'aibusy',
+                         'serverstarted', 'inference_config', 'image_pipeline', 'summarizer',
+                         'summary_tokenizer', 'use_colab_tpu', 'noai', 'disable_set_aibusy', 'cloudflare_link']
     settings_name = "system"
     def __init__(self, socketio, koboldai_var):
         self.socketio = socketio
diff --git a/tpu_mtj_backend.py b/tpu_mtj_backend.py
index 88a0ef78..f29adda4 100644
--- a/tpu_mtj_backend.py
+++ b/tpu_mtj_backend.py
@@ -1080,7 +1080,7 @@ def read_neox_checkpoint(state, path, config, checkpoint_shards=2):
 
     koboldai_vars.status_message = ""
 
-def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpoint=False, socketio_queue=None, initial_load=False, logger=None, cloudflare="", **kwargs) -> None:
+def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpoint=False, socketio_queue=None, initial_load=False, logger=None, **kwargs) -> None:
     global thread_resources_env, seq, tokenizer, network, params, pad_token_id
 
     if "pad_token_id" in kwargs:
@@ -1251,8 +1251,8 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
     thread_resources_env = maps.ResourceEnv(maps.Mesh(devices, ('dp', 'mp')), ())
     maps.thread_resources.env = thread_resources_env
     if initial_load:
-        logger.message(f"KoboldAI has finished loading and is available at the following link for UI 1: {cloudflare}")
-        logger.message(f"KoboldAI has finished loading and is available at the following link for UI 2: {cloudflare}/new_ui")
+        logger.message(f"KoboldAI has finished loading and is available at the following link for UI 1: {koboldai_vars.cloudflare_link}")
+        logger.message(f"KoboldAI has finished loading and is available at the following link for UI 2: {koboldai_vars.cloudflare_link}/new_ui")
 
 
     global shard_xmap, batch_xmap