Merge pull request #132 from VE-FORBRYDERNE/gpt2
Fix an error that occurs when loading GPT-2 models
This commit is contained in:
commit
4482e6db9a
|
@@ -1613,7 +1613,10 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go
|
||||||
model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", **lowmem)
|
model = GPTNeoForCausalLM.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", **lowmem)
|
||||||
else:
|
else:
|
||||||
old_rebuild_tensor = torch._utils._rebuild_tensor
|
old_rebuild_tensor = torch._utils._rebuild_tensor
|
||||||
def new_rebuild_tensor(storage, storage_offset, shape, stride):
|
def new_rebuild_tensor(storage: Union[torch_lazy_loader.LazyTensor, torch.Storage], storage_offset, shape, stride):
|
||||||
|
if(not isinstance(storage, torch_lazy_loader.LazyTensor)):
|
||||||
|
dtype = storage.dtype
|
||||||
|
else:
|
||||||
dtype = storage.storage_type.dtype
|
dtype = storage.storage_type.dtype
|
||||||
if(not isinstance(dtype, torch.dtype)):
|
if(not isinstance(dtype, torch.dtype)):
|
||||||
dtype = storage.storage_type(0).dtype
|
dtype = storage.storage_type(0).dtype
|
||||||
|
|
Loading…
Reference in New Issue