Remove even more debug

This commit is contained in:
somebody
2023-07-24 13:10:33 -05:00
parent 929917efe9
commit 43a4abaf63

View File

@@ -176,9 +176,6 @@ class TorchLazyTensor(LazyTensor):
    CheckpointChunkCache.key = self.key
    ziproot = checkpoint.namelist()[0].split("/")[0]
    CheckpointChunkCache.handle = checkpoint.open(f"{ziproot}/data/{self.key}", "r")
else:
    # Cache hit. Hip hip hooray! :^)
    # print(".", end="", flush=True)
@@ -318,7 +315,6 @@ class _LazyUnpickler(RestrictedUnpickler):
lazy_loaded_storages: Dict[str, LazyTensor]

def __init__(self, *args, **kwargs):
-   # print(args, kwargs)
    self.lazy_loaded_storages = {}
    return super().__init__(*args, **kwargs)
@@ -364,12 +360,10 @@ def safetensors_load_tensor_independently(
def patch_safetensors(callback):
-   print("Hi! We are patching safetensors")
    # Safetensors load patch
    import transformers

    def safetensors_load(checkpoint_file: str) -> dict:
-       print("LOAD NOW", safetensors_load)
        # Monkeypatch applied to safetensors.torch.load_file
        if utils.koboldai_vars.hascuda:
@@ -523,7 +517,6 @@ def use_lazy_load(
old_torch_load = torch.load

def torch_load(f, map_location=None, pickle_module=pickle, **pickle_load_args):
-   print("TORCHLOAD", f)
    model_dict = old_torch_load(
        f=f,
        map_location=map_location,