diff --git a/aiserver.py b/aiserver.py index f3117604..629dcba3 100644 --- a/aiserver.py +++ b/aiserver.py @@ -2380,6 +2380,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal with zipfile.ZipFile(f, "r") as z: try: last_storage_key = None + zipfolder = os.path.basename(os.path.normpath(f)).split('.')[0] f = None current_offset = 0 able_to_pin_layers = True @@ -2391,7 +2392,10 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal last_storage_key = storage_key if isinstance(f, zipfile.ZipExtFile): f.close() - f = z.open(f"archive/data/{storage_key}") + try: + f = z.open(f"archive/data/{storage_key}") + except: + f = z.open(f"{zipfolder}/data/{storage_key}") current_offset = 0 if current_offset != model_dict[key].seek_offset: f.read(model_dict[key].seek_offset - current_offset) diff --git a/static/application.js b/static/application.js index 6cdb531b..adee4f42 100644 --- a/static/application.js +++ b/static/application.js @@ -3492,28 +3492,26 @@ $(document).ready(function(){ // Shortcuts $(window).keydown(function (ev) { - // Only ctrl prefixed (for now) - if (!ev.ctrlKey) return; - - let handled = true; - switch (ev.key) { - // Ctrl+Z - Back - case "z": - button_actback.click(); - break; - // Ctrl+Y - Forward - case "y": - button_actfwd.click(); - break; - // Ctrl+E - Retry - case "e": - button_actretry.click(); - break; - default: - handled = false; + if (ev.altKey) + switch (ev.key) { + // Alt+Z - Back + case "z": + button_actback.click(); + break; + // Alt+Y - Forward + case "y": + button_actfwd.click(); + break; + // Alt+R - Retry + case "r": + button_actretry.click(); + break; + default: + return; + } else { + return; } - - if (handled) ev.preventDefault(); + ev.preventDefault(); }); $("#anotetemplate").on("input", function() { @@ -3796,4 +3794,4 @@ function getSelectedOptions(element) { output.push(item.value); } return output; -} \ No newline at end of file +} diff --git a/torch_lazy_loader.py 
b/torch_lazy_loader.py index 1298335d..fae49e51 100644 --- a/torch_lazy_loader.py +++ b/torch_lazy_loader.py @@ -54,6 +54,7 @@ import numpy as np import collections import _codecs import utils +import os from torch.nn import Module from typing import Any, Callable, Dict, Optional, Tuple, Type, Union @@ -93,12 +94,16 @@ class LazyTensor: def __repr__(self): return self.__view(repr) - def materialize(self, checkpoint: Union[zipfile.ZipFile, zipfile.ZipExtFile], map_location=None, no_grad=True) -> torch.Tensor: + def materialize(self, checkpoint: Union[zipfile.ZipFile, zipfile.ZipExtFile], map_location=None, no_grad=True, filename="pytorch_model.bin") -> torch.Tensor: + filename = os.path.basename(os.path.normpath(filename)).split('.')[0] size = reduce(lambda x, y: x * y, self.shape, 1) dtype = self.dtype nbytes = size if dtype is torch.bool else size * ((torch.finfo if dtype.is_floating_point else torch.iinfo)(dtype).bits >> 3) if isinstance(checkpoint, zipfile.ZipFile): - f = checkpoint.open(f"archive/data/{self.key}", "r") + try: + f = checkpoint.open(f"archive/data/{self.key}", "r") + except: + f = checkpoint.open(f"{filename}/data/{self.key}", "r") f.read(self.seek_offset) else: f = checkpoint diff --git a/tpu_mtj_backend.py b/tpu_mtj_backend.py index 9bb1fda2..4b27493e 100644 @@ -1251,6 +1251,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo with zipfile.ZipFile(f, "r") as z: try: last_storage_key = None + zipfolder = os.path.basename(os.path.normpath(f)).split('.')[0] f = None current_offset = 0 if utils.current_shard == 0: @@ -1283,7 +1284,10 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo last_storage_key = storage_key if isinstance(f, zipfile.ZipExtFile): f.close() - f = z.open(f"archive/data/{storage_key}") + try: + f = z.open(f"archive/data/{storage_key}") + except: + f = z.open(f"{zipfolder}/data/{storage_key}") current_offset = 0 if
current_offset != model_dict[key].seek_offset: f.read(model_dict[key].seek_offset - current_offset)