Add final layer norm to OPT

Gnome Ann 2022-06-21 16:36:26 -04:00
parent a10446f258
commit a7e3ef71aa
1 changed file with 2 additions and 0 deletions


@@ -12,6 +12,8 @@
 "decoder.embed_tokens.weight": {"mtj": {"module": "embedding_shard/~/linear", "param": "w", "transforms": ["no_transpose", "vocab_pad"]}},
 "decoder.project_in.weight": {"mtj": {"module": "embedding_shard", "param": "project_in"}},
 "decoder.embed_positions.weight": {"mtj": {"module": "embedding_shard", "param": "pos_embs", "transforms": ["no_transpose", "remove_first_two_rows"]}},
+"decoder.final_layer_norm.weight": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "scale"}},
+"decoder.final_layer_norm.bias": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "offset"}},
 "decoder.project_out.weight": {"mtj": {"module": "projection_shard", "param": "project_out"}}
 },
 "layer_weights": {