From 43a4abaf6320cc86e244cf103cc93b520339550e Mon Sep 17 00:00:00 2001
From: somebody
Date: Mon, 24 Jul 2023 13:10:33 -0500
Subject: [PATCH] Remove even more debug

---
 modeling/lazy_loader.py | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/modeling/lazy_loader.py b/modeling/lazy_loader.py
index a5e7c58f..74770a1c 100644
--- a/modeling/lazy_loader.py
+++ b/modeling/lazy_loader.py
@@ -176,9 +176,6 @@ class TorchLazyTensor(LazyTensor):
             CheckpointChunkCache.key = self.key
             ziproot = checkpoint.namelist()[0].split("/")[0]
             CheckpointChunkCache.handle = checkpoint.open(f"{ziproot}/data/{self.key}", "r")
-
-
-
         else:
             # Cache hit. Hip hip hooray! :^)
             # print(".", end="", flush=True)
@@ -318,7 +315,6 @@ class _LazyUnpickler(RestrictedUnpickler):
     lazy_loaded_storages: Dict[str, LazyTensor]
 
     def __init__(self, *args, **kwargs):
-        # print(args, kwargs)
         self.lazy_loaded_storages = {}
         return super().__init__(*args, **kwargs)
 
@@ -364,12 +360,10 @@ def safetensors_load_tensor_independently(
 
 
 def patch_safetensors(callback):
-    print("Hi! We are patching safetensors")
     # Safetensors load patch
     import transformers
 
     def safetensors_load(checkpoint_file: str) -> dict:
-        print("LOAD NOW", safetensors_load)
         # Monkeypatch applied to safetensors.torch.load_file
 
         if utils.koboldai_vars.hascuda:
@@ -523,7 +517,6 @@ def use_lazy_load(
     old_torch_load = torch.load
 
     def torch_load(f, map_location=None, pickle_module=pickle, **pickle_load_args):
-        print("TORCHLOAD", f)
         model_dict = old_torch_load(
             f=f,
             map_location=map_location,
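
Note (not part of the original patch): the final hunk edits a wrapper that use_lazy_load installs over torch.load. Below is a minimal, self-contained sketch of that temporary-monkeypatch pattern, assuming the wrapper simply delegates to the saved original; the helper name quiet_torch_load is hypothetical and does not appear in the patched file.

# A minimal sketch of the monkeypatching pattern seen in use_lazy_load:
# torch.load is swapped for a wrapper for the duration of a context
# manager, then restored on exit. quiet_torch_load is an illustrative
# name, not part of the patch.
import pickle
from contextlib import contextmanager

import torch


@contextmanager
def quiet_torch_load():
    old_torch_load = torch.load

    def torch_load(f, map_location=None, pickle_module=pickle, **pickle_load_args):
        # Delegate straight to the original loader; the removed
        # print("TORCHLOAD", f) debug line would have fired here.
        return old_torch_load(
            f=f,
            map_location=map_location,
            pickle_module=pickle_module,
            **pickle_load_args,
        )

    torch.load = torch_load
    try:
        yield
    finally:
        # Restore the original even if loading raises.
        torch.load = old_torch_load


# Usage: any torch.load call inside the block goes through the wrapper.
# with quiet_torch_load():
#     state_dict = torch.load("model.pt", map_location="cpu")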