Mirror of https://github.com/KoboldAI/KoboldAI-Client.git, synced 2025-06-05 21:59:24 +02:00
Merge pull request #349 from Zurnaz/llama_config
feat: llama config and updated mtj requirement
maps/llama.json (new file, +35 lines)
@@ -0,0 +1,35 @@
+{
+    "mtj_compat": "llama",
+    "mtj_pe": "neox_rotary",
+    "mtj_config_map": {
+        "norm": ["norm", "layernorm-nobias"],
+        "pe_rotary_dims": ["pe_rotary_dims", 128],
+        "d_model": "hidden_size",
+        "n_heads": "num_attention_heads",
+        "n_vocab": "vocab_size",
+        "layers": "num_hidden_layers",
+        "seq": "max_position_embeddings",
+        "tokenizer_class": ["tokenizer_class", "LlamaTokenizer"],
+        "tokenizer": ["tokenizer", "llama"]
+    },
+    "static_weights": {
+        "model.embed_tokens.weight": {"mtj": {"module": "embedding_shard/~/linear", "param": "w", "transforms": ["no_transpose", "vocab_pad"]}},
+        "model.norm.weight": {"mtj": {"module": "projection_shard/~/replicated_layer_norm", "param": "scale"}},
+        "lm_head.weight": {"mtj": {"module": "projection_shard/~/linear", "param": "w", "transforms": ["vocab_pad"]}}
+    },
+    "layer_weights": {
+        "transformer.h.{layer}.attn.attention.bias": {},
+        "transformer.h.{layer}.attn.attention.masked_bias": {},
+        "model.layers.{layer}.self_attn.rotary_emb.inv_freq": {},
+        "model.layers.{layer}.self_attn.q_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear", "param": "w"}},
+        "model.layers.{layer}.self_attn.v_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear_1", "param": "w"}},
+        "model.layers.{layer}.self_attn.k_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear_2", "param": "w"}},
+        "model.layers.{layer}.self_attn.o_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear_3", "param": "w"}},
+        "model.layers.{layer}.mlp.gate_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear_4", "param": "w"}},
+        "model.layers.{layer}.mlp.down_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear_5", "param": "w"}},
+        "model.layers.{layer}.mlp.up_proj.weight": {"mtj": {"module": "layer_{layer}/~/linear_6", "param": "w"}},
+        "model.layers.{layer}.input_layernorm.weight": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm", "param": "scale"}},
+        "model.layers.{layer}.post_attention_layernorm.weight": {"mtj": {"module": "layer_{layer}/~/replicated_layer_norm_1", "param": "scale"}}
+    }
+}
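The new map drives two separate jobs: mtj_config_map translates fields from a model's Hugging Face config.json into mesh-transformer-jax hyperparameters, and the two weight sections translate checkpoint tensor names into MTJ module/parameter targets. Judging by the file's structure, a bare string value names the config.json key to copy, while a two-element list is [key, default], so "pe_rotary_dims" falls back to 128 and the tokenizer falls back to LlamaTokenizer when the checkpoint's config omits them. A minimal sketch of that resolution (resolve_config_map and the sample hf_config values are illustrative, not code from this repository):

    import json

    # Sketch: resolve an mtj_config_map against a model's Hugging Face config.
    # Two entry shapes appear in maps/llama.json:
    #   "d_model": "hidden_size"                   -> copy hf_config["hidden_size"]
    #   "pe_rotary_dims": ["pe_rotary_dims", 128]  -> copy the key, else default 128
    def resolve_config_map(config_map, hf_config):
        resolved = {}
        for mtj_key, spec in config_map.items():
            if isinstance(spec, list):       # [hf_key, default]
                hf_key, default = spec
                resolved[mtj_key] = hf_config.get(hf_key, default)
            else:                            # bare hf_key string
                resolved[mtj_key] = hf_config[spec]
        return resolved

    with open("maps/llama.json") as f:
        llama_map = json.load(f)

    # Illustrative LLaMA-7B-style values; a real loader reads them from config.json.
    hf_config = {
        "hidden_size": 4096,
        "num_attention_heads": 32,
        "vocab_size": 32000,
        "num_hidden_layers": 32,
        "max_position_embeddings": 2048,
    }
    print(resolve_config_map(llama_map["mtj_config_map"], hf_config))
    # {'norm': 'layernorm-nobias', 'pe_rotary_dims': 128, 'd_model': 4096, ...}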
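static_weights entries map one tensor to one module, while layer_weights entries carry a {layer} placeholder that is stamped out once per transformer block; an empty object ({}) marks a checkpoint tensor with no MTJ destination (the GPT-style attention bias buffers and the rotary inv_freq buffer), which the loader can simply skip. The transforms list presumably tells the loader to skip the usual transpose (no_transpose) or to pad the vocabulary dimension so it shards evenly across TPU cores (vocab_pad). A rough sketch of the expansion (build_weight_index and num_layers are illustrative names; the skip and transform semantics are assumptions):

    import json

    # Sketch: expand static_weights and layer_weights into a flat index of
    # checkpoint tensor name -> MTJ target. The skip-on-{} behavior and the
    # meaning of "transforms" are assumptions, not confirmed repository code.
    def build_weight_index(spec, num_layers):
        index = {}
        for name, target in spec["static_weights"].items():
            index[name] = target["mtj"]
        for template, target in spec["layer_weights"].items():
            if not target:        # {} -> tensor has no MTJ destination; skip it
                continue
            for layer in range(num_layers):
                mtj = target["mtj"]
                index[template.format(layer=layer)] = {
                    "module": mtj["module"].format(layer=layer),
                    "param": mtj["param"],
                    "transforms": mtj.get("transforms", []),
                }
        return index

    with open("maps/llama.json") as f:
        llama_map = json.load(f)

    index = build_weight_index(llama_map, num_layers=32)  # 32 blocks in LLaMA-7B
    print(index["model.layers.0.self_attn.q_proj.weight"])
    # {'module': 'layer_0/~/linear', 'param': 'w', 'transforms': []}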
@@ -9,7 +9,7 @@ transformers == 4.28.0
 chex == 0.1.5
 huggingface_hub==0.12.1
 progressbar2
-git+https://github.com/VE-FORBRYDERNE/mesh-transformer-jax@ck
+git+https://github.com/Zurnaz/mesh-transformer-jax.git@llama_tpu
 Flask==2.2.3
 Flask-SocketIO==5.3.2
 python-socketio==5.7.2
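The requirements change swaps the pinned mesh-transformer-jax fork from VE-FORBRYDERNE's ck branch to Zurnaz's llama_tpu branch, which presumably carries the TPU-side LLaMA support the new config map depends on. For a manual install, the new pin resolves with:

    pip install git+https://github.com/Zurnaz/mesh-transformer-jax.git@llama_tpu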