Merge pull request #196 from henk717/united

Improved model support & Shortcut Fixes
henk717 2022-12-18 20:18:13 +01:00 committed by GitHub
commit 75fecb86cc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 37 additions and 26 deletions

View File

@@ -2380,6 +2380,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
             with zipfile.ZipFile(f, "r") as z:
                 try:
                     last_storage_key = None
+                    zipfolder = os.path.basename(os.path.normpath(f)).split('.')[0]
                     f = None
                     current_offset = 0
                     able_to_pin_layers = True
@@ -2391,7 +2392,10 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal
                         last_storage_key = storage_key
                         if isinstance(f, zipfile.ZipExtFile):
                             f.close()
-                        f = z.open(f"archive/data/{storage_key}")
+                        try:
+                            f = z.open(f"archive/data/{storage_key}")
+                        except:
+                            f = z.open(f"{zipfolder}/data/{storage_key}")
                         current_offset = 0
                     if current_offset != model_dict[key].seek_offset:
                         f.read(model_dict[key].seek_offset - current_offset)
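
The added zipfolder fallback targets PyTorch zip checkpoints whose internal data directory is not the usual archive/ but the checkpoint file's own basename: the loader still tries archive/data/<key> first and retries with the derived folder name. A minimal standalone sketch of the same lookup (the helper name open_storage and its arguments are illustrative, not part of this commit):

    import os
    import zipfile

    def open_storage(z: zipfile.ZipFile, checkpoint_path: str, storage_key: str):
        # Tensor data lives under "<folder>/data/<key>" inside the zip; the folder
        # is usually "archive", but some exports use the checkpoint's basename.
        zipfolder = os.path.basename(os.path.normpath(checkpoint_path)).split('.')[0]
        try:
            return z.open(f"archive/data/{storage_key}")
        except KeyError:  # zipfile raises KeyError for a missing member
            return z.open(f"{zipfolder}/data/{storage_key}")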

View File

@@ -3492,28 +3492,26 @@ $(document).ready(function(){
     // Shortcuts
     $(window).keydown(function (ev) {
-        // Only ctrl prefixed (for now)
-        if (!ev.ctrlKey) return;
-        let handled = true;
-
+        if (ev.altKey)
         switch (ev.key) {
-            // Ctrl+Z - Back
+            // Alt+Z - Back
             case "z":
                 button_actback.click();
                 break;
-            // Ctrl+Y - Forward
+            // Alt+Y - Forward
             case "y":
                 button_actfwd.click();
                 break;
-            // Ctrl+E - Retry
-            case "e":
+            // Alt+R - Retry
+            case "r":
                 button_actretry.click();
                 break;
             default:
-                handled = false;
+                return;
+        } else {
+            return;
         }
-
-        if (handled) ev.preventDefault();
+        ev.preventDefault();
     });
     $("#anotetemplate").on("input", function() {

View File

@@ -54,6 +54,7 @@ import numpy as np
 import collections
 import _codecs
 import utils
+import os
 from torch.nn import Module
 from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
@@ -93,12 +94,16 @@ class LazyTensor:
     def __repr__(self):
         return self.__view(repr)

-    def materialize(self, checkpoint: Union[zipfile.ZipFile, zipfile.ZipExtFile], map_location=None, no_grad=True) -> torch.Tensor:
+    def materialize(self, checkpoint: Union[zipfile.ZipFile, zipfile.ZipExtFile], map_location=None, no_grad=True, filename="pytorch_model.bin") -> torch.Tensor:
+        filename = os.path.basename(os.path.normpath(filename)).split('.')[0]
         size = reduce(lambda x, y: x * y, self.shape, 1)
         dtype = self.dtype
         nbytes = size if dtype is torch.bool else size * ((torch.finfo if dtype.is_floating_point else torch.iinfo)(dtype).bits >> 3)
         if isinstance(checkpoint, zipfile.ZipFile):
-            f = checkpoint.open(f"archive/data/{self.key}", "r")
+            try:
+                f = checkpoint.open(f"archive/data/{self.key}", "r")
+            except:
+                f = checkpoint.open(f"{filename}/data/{self.key}", "r")
             f.read(self.seek_offset)
         else:
             f = checkpoint
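
materialize() now accepts the checkpoint's filename and reduces it to the zip's internal folder name, mirroring the fallback above. The folder could alternatively be read straight from the archive itself; a hypothetical helper, not part of this commit, assuming the usual <prefix>/data/<n> layout:

    import zipfile

    def checkpoint_data_prefix(path: str) -> str:
        # Return the top-level folder that holds the tensor data entries
        # ("archive" for most checkpoints, the file's basename for others).
        with zipfile.ZipFile(path, "r") as z:
            for name in z.namelist():
                parts = name.split("/")
                if len(parts) >= 3 and parts[1] == "data":
                    return parts[0]
        raise ValueError(f"{path} does not look like a PyTorch zip checkpoint")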

View File

@@ -1251,6 +1251,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
             with zipfile.ZipFile(f, "r") as z:
                 try:
                     last_storage_key = None
+                    zipfolder = os.path.basename(os.path.normpath(f)).split('.')[0]
                     f = None
                     current_offset = 0
                     if utils.current_shard == 0:
@@ -1283,7 +1284,10 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
                         last_storage_key = storage_key
                         if isinstance(f, zipfile.ZipExtFile):
                             f.close()
-                        f = z.open(f"archive/data/{storage_key}")
+                        try:
+                            f = z.open(f"archive/data/{storage_key}")
+                        except:
+                            f = z.open(f"{zipfolder}/data/{storage_key}")
                         current_offset = 0
                     if current_offset != model_dict[key].seek_offset:
                         f.read(model_dict[key].seek_offset - current_offset)
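
These last two hunks apply the same zipfolder fallback to the TPU code path (note the tpu_driver version in the signature), so checkpoints with a non-standard internal folder name load there as well; the sketch after the first hunk illustrates the identical lookup.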