singularity-s0 committed
Commit 1cc77ff · Parent(s): 27825e3
fix: remove phantom keys for unused last 16 RVQ layers from index
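The commit prunes weight_map entries in model.safetensors.index.json that point at tensors no longer stored in any shard (the emb_ext.* and lm_heads.* keys belonging to the unused last 16 RVQ layers). Stale index keys can break loaders that fetch tensors by name from the mapped shard, which is presumably why they are removed rather than left in place. As a rough illustration only, a minimal Python sketch of such a cleanup using the safetensors API; the script, its paths, and its structure are hypothetical and not the tooling actually used for this commit:

# Hypothetical cleanup sketch: drop index entries whose tensors are not
# actually stored in any shard, then rewrite the weight_map.
import json
from safetensors import safe_open

INDEX = "model.safetensors.index.json"  # assumed to sit next to the shards

with open(INDEX) as f:
    index = json.load(f)

# Collect the tensor names that really exist across all referenced shards.
present = set()
for shard in set(index["weight_map"].values()):
    with safe_open(shard, framework="pt", device="cpu") as sf:
        present.update(sf.keys())

# Keep only mappings backed by a stored tensor; everything else is a phantom key.
index["weight_map"] = {k: v for k, v in index["weight_map"].items() if k in present}

with open(INDEX, "w") as f:
    json.dump(index, f, indent=2, sort_keys=True)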
model-00001-of-00004.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b0419c2b297a29d68a16ccd3f18f6317332263b056ba912adfa34eec6bc5fd22
+size 4983138608
model-00002-of-00004.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d188c9d2e699636704569e4af7b66acf1feb6a35e9de78e73c8fe2edba8c1a09
 size 4915961640
model-00003-of-00004.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:fc023862c4f8cbecf47fb30cdfe58a8f22298019680de5bffae926ac25ad137f
+size 4974673472
model-00004-of-00004.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9dcd2be961b4c4111af035bdb0b214e2093bac9f796e4be71ed8942a459a1d22
+size 1837264680
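The four *.safetensors entries above are Git LFS pointer files (version, oid sha256:<hash>, size <bytes>) rather than the binary shards themselves. A downloaded shard can be checked against the new pointer values; a small sketch, with a hypothetical helper name and the real oid/size of model-00002 plugged in:

# Verify a downloaded shard against its Git LFS pointer (oid and size from above).
import hashlib
import os

def verify_lfs(path: str, expected_sha256: str, expected_size: int) -> bool:
    # Cheap check first: the byte size recorded in the pointer.
    if os.path.getsize(path) != expected_size:
        return False
    # Then the content hash, streamed in 1 MiB chunks.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_sha256

print(verify_lfs(
    "model-00002-of-00004.safetensors",
    "d188c9d2e699636704569e4af7b66acf1feb6a35e9de78e73c8fe2edba8c1a09",
    4915961640,
))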
model.safetensors.index.json CHANGED

@@ -1,41 +1,25 @@
 {
   "metadata": {
-    "total_parameters":
-    "total_size":
+    "total_parameters": 8355492864,
+    "total_size": 16710985728
   },
   "weight_map": {
-    "emb_ext.0.weight": "model-
-    "emb_ext.1.weight": "model-
-    "emb_ext.10.weight": "model-
-    "emb_ext.11.weight": "model-
-    "emb_ext.12.weight": "model-
-    "emb_ext.13.weight": "model-
-    "emb_ext.14.weight": "model-
-    "emb_ext.15.weight": "model-
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.
-    "emb_ext.23.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.24.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.25.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.26.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.27.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.28.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.29.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.3.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.30.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.31.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.4.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.5.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.6.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.7.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.8.weight": "model-00004-of-00004.safetensors",
-    "emb_ext.9.weight": "model-00004-of-00004.safetensors",
+    "emb_ext.0.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.1.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.10.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.11.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.12.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.13.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.14.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.15.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.2.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.3.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.4.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.5.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.6.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.7.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.8.weight": "model-00001-of-00004.safetensors",
+    "emb_ext.9.weight": "model-00001-of-00004.safetensors",
     "language_model.embed_tokens.weight": "model-00001-of-00004.safetensors",
     "language_model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
     "language_model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",

@@ -202,17 +186,17 @@
     "language_model.layers.21.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
     "language_model.layers.21.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
     "language_model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "language_model.layers.22.input_layernorm.weight": "model-
+    "language_model.layers.22.input_layernorm.weight": "model-00002-of-00004.safetensors",
     "language_model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "language_model.layers.22.self_attn.k_norm.weight": "model-
-    "language_model.layers.22.self_attn.k_proj.weight": "model-
-    "language_model.layers.22.self_attn.o_proj.weight": "model-
-    "language_model.layers.22.self_attn.q_norm.weight": "model-
-    "language_model.layers.22.self_attn.q_proj.weight": "model-
-    "language_model.layers.22.self_attn.v_proj.weight": "model-
+    "language_model.layers.22.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "language_model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "language_model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "language_model.layers.22.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "language_model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "language_model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",

@@ -354,19 +338,19 @@
     "language_model.layers.34.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.34.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
     "language_model.layers.34.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
-    "language_model.layers.34.self_attn.q_proj.weight": "model-
-    "language_model.layers.34.self_attn.v_proj.weight": "model-
+    "language_model.layers.34.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+    "language_model.layers.34.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.self_attn.k_norm.weight": "model-00004-of-00004.safetensors",
-    "language_model.layers.35.self_attn.k_proj.weight": "model-
+    "language_model.layers.35.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.35.self_attn.q_norm.weight": "model-00004-of-00004.safetensors",
-    "language_model.layers.35.self_attn.q_proj.weight": "model-
-    "language_model.layers.35.self_attn.v_proj.weight": "model-
+    "language_model.layers.35.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+    "language_model.layers.35.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
     "language_model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
     "language_model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
     "language_model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",

@@ -422,17 +406,17 @@
     "language_model.layers.8.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
     "language_model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
     "language_model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "language_model.layers.9.input_layernorm.weight": "model-
-    "language_model.layers.9.mlp.down_proj.weight": "model-
-    "language_model.layers.9.mlp.gate_proj.weight": "model-
+    "language_model.layers.9.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "language_model.layers.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "language_model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
     "language_model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
     "language_model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "language_model.layers.9.self_attn.k_norm.weight": "model-
-    "language_model.layers.9.self_attn.k_proj.weight": "model-
-    "language_model.layers.9.self_attn.o_proj.weight": "model-
-    "language_model.layers.9.self_attn.q_norm.weight": "model-
-    "language_model.layers.9.self_attn.q_proj.weight": "model-
-    "language_model.layers.9.self_attn.v_proj.weight": "model-
+    "language_model.layers.9.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "language_model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "language_model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "language_model.layers.9.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "language_model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "language_model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
     "language_model.norm.weight": "model-00004-of-00004.safetensors",
     "lm_heads.0.weight": "model-00004-of-00004.safetensors",
     "lm_heads.1.weight": "model-00004-of-00004.safetensors",

@@ -443,24 +427,8 @@
     "lm_heads.14.weight": "model-00004-of-00004.safetensors",
     "lm_heads.15.weight": "model-00004-of-00004.safetensors",
     "lm_heads.16.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.17.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.18.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.19.weight": "model-00004-of-00004.safetensors",
     "lm_heads.2.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.20.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.21.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.22.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.23.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.24.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.25.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.26.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.27.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.28.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.29.weight": "model-00004-of-00004.safetensors",
     "lm_heads.3.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.30.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.31.weight": "model-00004-of-00004.safetensors",
-    "lm_heads.32.weight": "model-00004-of-00004.safetensors",
     "lm_heads.4.weight": "model-00004-of-00004.safetensors",
     "lm_heads.5.weight": "model-00004-of-00004.safetensors",
     "lm_heads.6.weight": "model-00004-of-00004.safetensors",
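One quick sanity check on the rewritten metadata block: the reported total_size is exactly twice total_parameters, which is consistent with every parameter being stored in a 2-byte dtype (bf16/fp16), and it is about 52 KB smaller than the sum of the four shard sizes from the LFS pointers above, roughly what the per-shard safetensors headers would account for. A short verification sketch; the values are copied from this diff and the dtype reading is an assumption:

# Values copied from the new index metadata and the LFS pointers in this commit.
total_parameters = 8355492864
total_size = 16710985728
shard_sizes = [4983138608, 4915961640, 4974673472, 1837264680]

# total_size counts tensor bytes only; 2 bytes/param matches a bf16/fp16 checkpoint (assumption).
assert total_size == 2 * total_parameters

# Shard files are slightly larger because each carries its own JSON header.
print(sum(shard_sizes) - total_size)  # -> 52672 bytes of per-shard overhead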