{
"metadata": {
"total_size": 52063457280
},
"weight_map": {
"lm_head/kernel": "flax_model-00011-of-00011.msgpack",
"model/embed_tokens/embedding": "flax_model-00001-of-00011.msgpack",
"model/layers/0/input_layernorm/weight": "flax_model-00001-of-00011.msgpack",
"model/layers/0/mlp/down_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/0/mlp/gate_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/0/mlp/up_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/0/post_attention_layernorm/weight": "flax_model-00001-of-00011.msgpack",
"model/layers/0/self_attn/k_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/0/self_attn/o_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/0/self_attn/q_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/0/self_attn/v_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/input_layernorm/weight": "flax_model-00001-of-00011.msgpack",
"model/layers/1/mlp/down_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/mlp/gate_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/mlp/up_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/post_attention_layernorm/weight": "flax_model-00001-of-00011.msgpack",
"model/layers/1/self_attn/k_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/self_attn/o_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/self_attn/q_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/1/self_attn/v_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/10/input_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/10/mlp/down_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/10/mlp/gate_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/10/mlp/up_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/10/post_attention_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/10/self_attn/k_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/10/self_attn/o_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/10/self_attn/q_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/10/self_attn/v_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/11/input_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/11/mlp/down_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/11/mlp/gate_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/11/mlp/up_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/11/post_attention_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/11/self_attn/k_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/11/self_attn/o_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/11/self_attn/q_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/11/self_attn/v_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/input_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/12/mlp/down_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/mlp/gate_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/mlp/up_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/post_attention_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/12/self_attn/k_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/self_attn/o_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/self_attn/q_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/12/self_attn/v_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/input_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/13/mlp/down_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/mlp/gate_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/mlp/up_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/post_attention_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/13/self_attn/k_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/self_attn/o_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/self_attn/q_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/13/self_attn/v_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/input_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/14/mlp/down_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/mlp/gate_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/mlp/up_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/post_attention_layernorm/weight": "flax_model-00004-of-00011.msgpack",
"model/layers/14/self_attn/k_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/self_attn/o_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/self_attn/q_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/14/self_attn/v_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/15/input_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/15/mlp/down_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/15/mlp/gate_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/15/mlp/up_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/15/post_attention_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/15/self_attn/k_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/15/self_attn/o_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/15/self_attn/q_proj/kernel": "flax_model-00004-of-00011.msgpack",
"model/layers/15/self_attn/v_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/input_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/16/mlp/down_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/mlp/gate_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/mlp/up_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/post_attention_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/16/self_attn/k_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/self_attn/o_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/self_attn/q_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/16/self_attn/v_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/input_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/17/mlp/down_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/mlp/gate_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/mlp/up_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/post_attention_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/17/self_attn/k_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/self_attn/o_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/self_attn/q_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/17/self_attn/v_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/input_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/18/mlp/down_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/mlp/gate_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/mlp/up_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/post_attention_layernorm/weight": "flax_model-00005-of-00011.msgpack",
"model/layers/18/self_attn/k_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/self_attn/o_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/self_attn/q_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/18/self_attn/v_proj/kernel": "flax_model-00005-of-00011.msgpack",
"model/layers/19/input_layernorm/weight": "flax_model-00006-of-00011.msgpack",
"model/layers/19/mlp/down_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/19/mlp/gate_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/19/mlp/up_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/19/post_attention_layernorm/weight": "flax_model-00006-of-00011.msgpack",
"model/layers/19/self_attn/k_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/19/self_attn/o_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/19/self_attn/q_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/19/self_attn/v_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/2/input_layernorm/weight": "flax_model-00001-of-00011.msgpack",
"model/layers/2/mlp/down_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/2/mlp/gate_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/2/mlp/up_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/2/post_attention_layernorm/weight": "flax_model-00001-of-00011.msgpack",
"model/layers/2/self_attn/k_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/2/self_attn/o_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/2/self_attn/q_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/2/self_attn/v_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/20/input_layernorm/weight": "flax_model-00006-of-00011.msgpack",
"model/layers/20/mlp/down_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/20/mlp/gate_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/20/mlp/up_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/20/post_attention_layernorm/weight": "flax_model-00006-of-00011.msgpack",
"model/layers/20/self_attn/k_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/20/self_attn/o_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/20/self_attn/q_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/20/self_attn/v_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/input_layernorm/weight": "flax_model-00006-of-00011.msgpack",
"model/layers/21/mlp/down_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/mlp/gate_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/mlp/up_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/post_attention_layernorm/weight": "flax_model-00006-of-00011.msgpack",
"model/layers/21/self_attn/k_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/self_attn/o_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/self_attn/q_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/21/self_attn/v_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/22/input_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/22/mlp/down_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/22/mlp/gate_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/22/mlp/up_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/22/post_attention_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/22/self_attn/k_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/22/self_attn/o_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/22/self_attn/q_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/22/self_attn/v_proj/kernel": "flax_model-00006-of-00011.msgpack",
"model/layers/23/input_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/23/mlp/down_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/23/mlp/gate_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/23/mlp/up_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/23/post_attention_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/23/self_attn/k_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/23/self_attn/o_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/23/self_attn/q_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/23/self_attn/v_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/input_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/24/mlp/down_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/mlp/gate_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/mlp/up_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/post_attention_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/24/self_attn/k_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/self_attn/o_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/self_attn/q_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/24/self_attn/v_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/input_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/25/mlp/down_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/mlp/gate_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/mlp/up_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/post_attention_layernorm/weight": "flax_model-00007-of-00011.msgpack",
"model/layers/25/self_attn/k_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/self_attn/o_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/self_attn/q_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/25/self_attn/v_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/26/input_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/26/mlp/down_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/26/mlp/gate_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/26/mlp/up_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/26/post_attention_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/26/self_attn/k_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/26/self_attn/o_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/26/self_attn/q_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/26/self_attn/v_proj/kernel": "flax_model-00007-of-00011.msgpack",
"model/layers/27/input_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/27/mlp/down_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/27/mlp/gate_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/27/mlp/up_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/27/post_attention_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/27/self_attn/k_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/27/self_attn/o_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/27/self_attn/q_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/27/self_attn/v_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/input_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/28/mlp/down_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/mlp/gate_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/mlp/up_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/post_attention_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/28/self_attn/k_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/self_attn/o_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/self_attn/q_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/28/self_attn/v_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/input_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/29/mlp/down_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/mlp/gate_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/mlp/up_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/post_attention_layernorm/weight": "flax_model-00008-of-00011.msgpack",
"model/layers/29/self_attn/k_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/self_attn/o_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/self_attn/q_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/29/self_attn/v_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/3/input_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/3/mlp/down_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/3/mlp/gate_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/3/mlp/up_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/3/post_attention_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/3/self_attn/k_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/3/self_attn/o_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/3/self_attn/q_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/3/self_attn/v_proj/kernel": "flax_model-00001-of-00011.msgpack",
"model/layers/30/input_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/30/mlp/down_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/30/mlp/gate_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/30/mlp/up_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/30/post_attention_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/30/self_attn/k_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/30/self_attn/o_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/30/self_attn/q_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/30/self_attn/v_proj/kernel": "flax_model-00008-of-00011.msgpack",
"model/layers/31/input_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/31/mlp/down_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/31/mlp/gate_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/31/mlp/up_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/31/post_attention_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/31/self_attn/k_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/31/self_attn/o_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/31/self_attn/q_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/31/self_attn/v_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/input_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/32/mlp/down_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/mlp/gate_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/mlp/up_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/post_attention_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/32/self_attn/k_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/self_attn/o_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/self_attn/q_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/32/self_attn/v_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/input_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/33/mlp/down_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/mlp/gate_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/mlp/up_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/post_attention_layernorm/weight": "flax_model-00009-of-00011.msgpack",
"model/layers/33/self_attn/k_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/self_attn/o_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/self_attn/q_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/33/self_attn/v_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/34/input_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/34/mlp/down_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/34/mlp/gate_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/34/mlp/up_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/34/post_attention_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/34/self_attn/k_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/34/self_attn/o_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/34/self_attn/q_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/34/self_attn/v_proj/kernel": "flax_model-00009-of-00011.msgpack",
"model/layers/35/input_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/35/mlp/down_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/35/mlp/gate_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/35/mlp/up_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/35/post_attention_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/35/self_attn/k_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/35/self_attn/o_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/35/self_attn/q_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/35/self_attn/v_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/input_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/36/mlp/down_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/mlp/gate_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/mlp/up_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/post_attention_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/36/self_attn/k_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/self_attn/o_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/self_attn/q_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/36/self_attn/v_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/input_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/37/mlp/down_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/mlp/gate_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/mlp/up_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/post_attention_layernorm/weight": "flax_model-00010-of-00011.msgpack",
"model/layers/37/self_attn/k_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/self_attn/o_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/self_attn/q_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/37/self_attn/v_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/38/input_layernorm/weight": "flax_model-00011-of-00011.msgpack",
"model/layers/38/mlp/down_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/38/mlp/gate_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/38/mlp/up_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/38/post_attention_layernorm/weight": "flax_model-00011-of-00011.msgpack",
"model/layers/38/self_attn/k_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/38/self_attn/o_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/38/self_attn/q_proj/kernel": "flax_model-00010-of-00011.msgpack",
"model/layers/38/self_attn/v_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/input_layernorm/weight": "flax_model-00011-of-00011.msgpack",
"model/layers/39/mlp/down_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/mlp/gate_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/mlp/up_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/post_attention_layernorm/weight": "flax_model-00011-of-00011.msgpack",
"model/layers/39/self_attn/k_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/self_attn/o_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/self_attn/q_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/39/self_attn/v_proj/kernel": "flax_model-00011-of-00011.msgpack",
"model/layers/4/input_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/4/mlp/down_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/4/mlp/gate_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/4/mlp/up_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/4/post_attention_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/4/self_attn/k_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/4/self_attn/o_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/4/self_attn/q_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/4/self_attn/v_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/input_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/5/mlp/down_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/mlp/gate_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/mlp/up_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/post_attention_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/5/self_attn/k_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/self_attn/o_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/self_attn/q_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/5/self_attn/v_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/input_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/6/mlp/down_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/mlp/gate_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/mlp/up_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/post_attention_layernorm/weight": "flax_model-00002-of-00011.msgpack",
"model/layers/6/self_attn/k_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/self_attn/o_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/self_attn/q_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/6/self_attn/v_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/7/input_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/7/mlp/down_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/7/mlp/gate_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/7/mlp/up_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/7/post_attention_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/7/self_attn/k_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/7/self_attn/o_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/7/self_attn/q_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/7/self_attn/v_proj/kernel": "flax_model-00002-of-00011.msgpack",
"model/layers/8/input_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/8/mlp/down_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/8/mlp/gate_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/8/mlp/up_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/8/post_attention_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/8/self_attn/k_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/8/self_attn/o_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/8/self_attn/q_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/8/self_attn/v_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/input_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/9/mlp/down_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/mlp/gate_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/mlp/up_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/post_attention_layernorm/weight": "flax_model-00003-of-00011.msgpack",
"model/layers/9/self_attn/k_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/self_attn/o_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/self_attn/q_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/layers/9/self_attn/v_proj/kernel": "flax_model-00003-of-00011.msgpack",
"model/norm/weight": "flax_model-00011-of-00011.msgpack"
}
}