{
  "metadata": {
    "total_size": 11488014336
  },
  "weight_map": {
    "layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "lm_head.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.10.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.10.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.13.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.14.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.15.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.16.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.17.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.18.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.19.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.2.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.20.attn.W_rope_k.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.20.attn.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.20.attn.c_q.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.20.attn.kv_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.20.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.20.mlp.c_fc1.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.20.mlp.c_fc2.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.20.mlp.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.20.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.attn.W_rope_k.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.attn.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.attn.c_q.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.attn.kv_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.mlp.c_fc1.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.mlp.c_fc2.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.mlp.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.21.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.attn.W_rope_k.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.attn.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.attn.c_q.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.attn.kv_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.mlp.c_fc1.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.mlp.c_fc2.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.mlp.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.22.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.attn.W_rope_k.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.attn.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.attn.c_q.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.attn.kv_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.mlp.c_fc1.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.mlp.c_fc2.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.mlp.c_proj.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.23.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
    "transformer.h.3.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.mlp.c_fc2.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.mlp.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.9.attn.W_rope_k.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.9.attn.c_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.9.attn.c_q.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.9.attn.kv_proj.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.9.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.9.mlp.c_fc1.weight": "pytorch_model-00001-of-00003.bin",
    "transformer.h.9.mlp.c_fc2.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.9.mlp.c_proj.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.h.9.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
    "transformer.wte.weight": "pytorch_model-00001-of-00003.bin"
  }
}