From 5dd67d027a76ec0bb7afd2525d521407143f74dc Mon Sep 17 00:00:00 2001
From: somebody
Date: Sat, 15 Apr 2023 18:34:25 -0500
Subject: [PATCH] Workaround for socketio context errors for loading

---
 aiserver.py                           | 4 ++++
 modeling/inference_models/hf_torch.py | 1 +
 utils.py                              | 9 +++++++--
 3 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/aiserver.py b/aiserver.py
index 2530555d..6b243efb 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -585,6 +585,10 @@ app = Flask(__name__, root_path=os.getcwd())
 app.secret_key = secrets.token_hex()
 app.config['SESSION_TYPE'] = 'filesystem'
 app.config['TEMPLATES_AUTO_RELOAD'] = True
+
+# Hack for socket stuff that needs app context
+utils.flask_app = app
+
 Compress(app)
 socketio = SocketIO(app, async_method="eventlet", manage_session=False, cors_allowed_origins='*', max_http_buffer_size=10_000_000)
 #socketio = SocketIO(app, async_method="eventlet", manage_session=False, cors_allowed_origins='*', max_http_buffer_size=10_000_000, logger=logger, engineio_logger=True)
diff --git a/modeling/inference_models/hf_torch.py b/modeling/inference_models/hf_torch.py
index a2b2ff80..27af6e6e 100644
--- a/modeling/inference_models/hf_torch.py
+++ b/modeling/inference_models/hf_torch.py
@@ -487,6 +487,7 @@ class HFTorchInferenceModel(HFInferenceModel):
         utils.koboldai_vars.status_message = "Loading model"
         utils.koboldai_vars.total_layers = num_tensors
         utils.koboldai_vars.loaded_layers = 0
+        print(utils.emit)
         utils.bar = tqdm(
             total=num_tensors,
             desc="Loading model tensors",
diff --git a/utils.py b/utils.py
index 90c514ca..1841bac9 100644
--- a/utils.py
+++ b/utils.py
@@ -36,6 +36,9 @@ default_sampler_order = [6, 0, 1, 2, 3, 4, 5]
 
 emit = None
 
+# Hack for socket stuff that needs app context
+flask_app = None
+
 #==================================================================#
 # Decorator to prevent a function's actions from being run until
 # at least x seconds have passed without the function being called
@@ -633,8 +636,10 @@ class UIProgressBarFile(object):
         print('\r' + bar, end='')
         time.sleep(0.01)
         try:
-            self.emit_func('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", " ")}, broadcast=True, room="UI_1")
-        except:
+            with flask_app.app_context():
+                self.emit_func('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", " ")}, broadcast=True, room="UI_1", namespace="/")
+        except Exception as e:
+            print(e)
             pass
 
     def flush(self):