Add vocab padding to embedding bias in gptj.json

Author: vfbd
Date: 2022-11-02 19:02:09 -04:00
Parent: 7b5a766b4a
Commit: b20d80ca2a
2 changed files with 3 additions and 3 deletions


@@ -1304,7 +1304,7 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
if "divide_by_shards" in transforms:
tensor /= params["cores_per_replica"]
if "vocab_pad" in transforms:
tensor = torch.nn.functional.pad(tensor, (0, 0, 0, params["n_vocab_padding"]))
tensor = torch.nn.functional.pad(tensor, (0,) * (tensor.ndim * 2 - 1) + (params["n_vocab_padding"],))
if "no_transpose" not in transforms and tensor.ndim == 2:
tensor = tensor.T
tensor.unsqueeze_(0)
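
For context, a minimal sketch (not part of the commit) of why the generalized pad tuple matters: torch.nn.functional.pad consumes its pad values starting from the last dimension, so the old hard-coded (0, 0, 0, n) spec only pads the vocab axis of a 2-D weight and fails on lower-rank tensors. Building the tuple from tensor.ndim pads the end of dimension 0 (the vocab axis) for any rank, which is what allows the 1-D embedding bias to be padded as well. The shapes and padding amount below are illustrative, not taken from the commit.

import torch

n_vocab_padding = 8                # illustrative padding amount
weight = torch.zeros(50400, 4096)  # 2-D embedding weight: (vocab, hidden)
bias = torch.zeros(50400)          # 1-D embedding bias: (vocab,)

for tensor in (weight, bias):
    # One (left, right) pair per dimension, consumed from the last dim backwards;
    # only the final value is nonzero, so only the end of dim 0 (vocab) grows.
    pad = (0,) * (tensor.ndim * 2 - 1) + (n_vocab_padding,)
    padded = torch.nn.functional.pad(tensor, pad)
    print(tuple(tensor.shape), "->", tuple(padded.shape))
    # (50400, 4096) -> (50408, 4096)
    # (50400,)      -> (50408,)

# The old spec (0, 0, 0, n_vocab_padding) applied to the 1-D bias would raise an
# error, because the pad tuple describes more dimensions than the tensor has.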