From f9fb5eba89f12e3bcaf2f4610e6835ff1939cfae Mon Sep 17 00:00:00 2001
From: somebody
Date: Sat, 15 Apr 2023 18:56:49 -0500
Subject: [PATCH] Remove debug

---
 modeling/inference_models/hf_torch.py | 1 -
 utils.py                              | 1 -
 2 files changed, 2 deletions(-)

diff --git a/modeling/inference_models/hf_torch.py b/modeling/inference_models/hf_torch.py
index 27af6e6e..a2b2ff80 100644
--- a/modeling/inference_models/hf_torch.py
+++ b/modeling/inference_models/hf_torch.py
@@ -487,7 +487,6 @@ class HFTorchInferenceModel(HFInferenceModel):
         utils.koboldai_vars.status_message = "Loading model"
         utils.koboldai_vars.total_layers = num_tensors
         utils.koboldai_vars.loaded_layers = 0
-        print(utils.emit)
         utils.bar = tqdm(
             total=num_tensors,
             desc="Loading model tensors",
diff --git a/utils.py b/utils.py
index 1841bac9..5b7012fc 100644
--- a/utils.py
+++ b/utils.py
@@ -639,7 +639,6 @@ class UIProgressBarFile(object):
                 with flask_app.app_context():
                     self.emit_func('from_server', {'cmd': 'model_load_status', 'data': bar.replace(" ", "&nbsp;")}, broadcast=True, room="UI_1", namespace="/")
             except Exception as e:
-                print(e)
                 pass

     def flush(self):