| { |
| "model_id": "microsoft/phi-4", |
| "timestamp": "2026-04-21 15:49:11 UTC", |
| "overall": { |
| "n_tensors": 243, |
| "total_elements": 14659507200, |
| "total_bytes": 29319014400, |
| "total_bytes_human": "29.32 GB", |
| "avg_exponent_entropy_bits": 2.5674, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9692, |
| "avg_top_16_coverage": 0.999895, |
| "avg_huffman_bits_per_exponent": 2.6106, |
| "avg_huffman_full_ratio": 0.6632, |
| "avg_huffman_size_reduction_pct": 33.68, |
| "avg_palette_full_ratio": 0.7525, |
| "avg_palette_size_reduction_pct": 24.75, |
| "avg_verbatim_row_fraction": 0.006284 |
| }, |
| "by_category": { |
| "mlp": { |
| "n_tensors": 80, |
| "total_elements": 11010048000, |
| "total_bytes": 22020096000, |
| "total_bytes_human": "22.02 GB", |
| "avg_exponent_entropy_bits": 2.5571, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9689, |
| "avg_top_16_coverage": 0.999906, |
| "avg_huffman_bits_per_exponent": 2.6021, |
| "avg_huffman_full_ratio": 0.6626, |
| "avg_huffman_size_reduction_pct": 33.74, |
| "avg_palette_full_ratio": 0.7524, |
| "avg_palette_size_reduction_pct": 24.76, |
| "avg_verbatim_row_fraction": 0.005745 |
| }, |
| "attention": { |
| "n_tensors": 80, |
| "total_elements": 2621440000, |
| "total_bytes": 5242880000, |
| "total_bytes_human": "5.24 GB", |
| "avg_exponent_entropy_bits": 2.6105, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9695, |
| "avg_top_16_coverage": 0.999857, |
| "avg_huffman_bits_per_exponent": 2.6461, |
| "avg_huffman_full_ratio": 0.6654, |
| "avg_huffman_size_reduction_pct": 33.46, |
| "avg_palette_full_ratio": 0.7531, |
| "avg_palette_size_reduction_pct": 24.69, |
| "avg_verbatim_row_fraction": 0.008469 |
| }, |
| "embedding": { |
| "n_tensors": 2, |
| "total_elements": 1027604480, |
| "total_bytes": 2055208960, |
| "total_bytes_human": "2.06 GB", |
| "avg_exponent_entropy_bits": 2.5688, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9722, |
| "avg_top_16_coverage": 0.999883, |
| "avg_huffman_bits_per_exponent": 2.6112, |
| "avg_huffman_full_ratio": 0.6632, |
| "avg_huffman_size_reduction_pct": 33.68, |
| "avg_palette_full_ratio": 0.7526, |
| "avg_palette_size_reduction_pct": 24.74, |
| "avg_verbatim_row_fraction": 0.00648 |
| }, |
| "layernorm": { |
| "n_tensors": 81, |
| "total_elements": 414720, |
| "total_bytes": 829440, |
| "total_bytes_human": "0.00 GB", |
| "avg_exponent_entropy_bits": 0.2416, |
| "avg_sign_entropy_bits": 0.0001, |
| "avg_mantissa_entropy_bits": 4.6445, |
| "avg_top_16_coverage": 1.0, |
| "avg_huffman_bits_per_exponent": 1.0646, |
| "avg_huffman_full_ratio": 0.5688, |
| "avg_huffman_size_reduction_pct": 43.12, |
| "avg_palette_full_ratio": 0.7525, |
| "avg_palette_size_reduction_pct": 24.75, |
| "avg_verbatim_row_fraction": 0.0 |
| } |
| }, |
| "by_type": { |
| "mlp_down": { |
| "n_tensors": 40, |
| "total_elements": 3670016000, |
| "total_bytes": 7340032000, |
| "total_bytes_human": "7.34 GB", |
| "avg_exponent_entropy_bits": 2.5537, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9687, |
| "avg_top_16_coverage": 0.999904, |
| "avg_huffman_bits_per_exponent": 2.5999, |
| "avg_huffman_full_ratio": 0.6625, |
| "avg_huffman_size_reduction_pct": 33.75, |
| "avg_palette_full_ratio": 0.7524, |
| "avg_palette_size_reduction_pct": 24.76, |
| "avg_verbatim_row_fraction": 0.005836 |
| }, |
| "mlp_gate_up_fused": { |
| "n_tensors": 40, |
| "total_elements": 7340032000, |
| "total_bytes": 14680064000, |
| "total_bytes_human": "14.68 GB", |
| "avg_exponent_entropy_bits": 2.5588, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.969, |
| "avg_top_16_coverage": 0.999907, |
| "avg_huffman_bits_per_exponent": 2.6033, |
| "avg_huffman_full_ratio": 0.6627, |
| "avg_huffman_size_reduction_pct": 33.73, |
| "avg_palette_full_ratio": 0.7524, |
| "avg_palette_size_reduction_pct": 24.76, |
| "avg_verbatim_row_fraction": 0.0057 |
| }, |
| "attn_o": { |
| "n_tensors": 40, |
| "total_elements": 1048576000, |
| "total_bytes": 2097152000, |
| "total_bytes_human": "2.10 GB", |
| "avg_exponent_entropy_bits": 2.5636, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9688, |
| "avg_top_16_coverage": 0.999877, |
| "avg_huffman_bits_per_exponent": 2.6038, |
| "avg_huffman_full_ratio": 0.6627, |
| "avg_huffman_size_reduction_pct": 33.73, |
| "avg_palette_full_ratio": 0.7526, |
| "avg_palette_size_reduction_pct": 24.74, |
| "avg_verbatim_row_fraction": 0.006477 |
| }, |
| "attn_qkv_fused": { |
| "n_tensors": 40, |
| "total_elements": 1572864000, |
| "total_bytes": 3145728000, |
| "total_bytes_human": "3.15 GB", |
| "avg_exponent_entropy_bits": 2.6417, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.97, |
| "avg_top_16_coverage": 0.999845, |
| "avg_huffman_bits_per_exponent": 2.6743, |
| "avg_huffman_full_ratio": 0.6671, |
| "avg_huffman_size_reduction_pct": 33.29, |
| "avg_palette_full_ratio": 0.7534, |
| "avg_palette_size_reduction_pct": 24.66, |
| "avg_verbatim_row_fraction": 0.009797 |
| }, |
| "embedding": { |
| "n_tensors": 1, |
| "total_elements": 513802240, |
| "total_bytes": 1027604480, |
| "total_bytes_human": "1.03 GB", |
| "avg_exponent_entropy_bits": 2.5824, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9723, |
| "avg_top_16_coverage": 0.999852, |
| "avg_huffman_bits_per_exponent": 2.62, |
| "avg_huffman_full_ratio": 0.6637, |
| "avg_huffman_size_reduction_pct": 33.63, |
| "avg_palette_full_ratio": 0.7528, |
| "avg_palette_size_reduction_pct": 24.72, |
| "avg_verbatim_row_fraction": 0.007484 |
| }, |
| "lm_head": { |
| "n_tensors": 1, |
| "total_elements": 513802240, |
| "total_bytes": 1027604480, |
| "total_bytes_human": "1.03 GB", |
| "avg_exponent_entropy_bits": 2.5553, |
| "avg_sign_entropy_bits": 1.0, |
| "avg_mantissa_entropy_bits": 6.9721, |
| "avg_top_16_coverage": 0.999914, |
| "avg_huffman_bits_per_exponent": 2.6025, |
| "avg_huffman_full_ratio": 0.6627, |
| "avg_huffman_size_reduction_pct": 33.73, |
| "avg_palette_full_ratio": 0.7523, |
| "avg_palette_size_reduction_pct": 24.77, |
| "avg_verbatim_row_fraction": 0.005476 |
| } |
| }, |
| "mlp_only_compression": { |
| "mlp_original_bytes": 22020096000, |
| "mlp_compressed_bytes": 14591262458, |
| "non_mlp_bytes": 7298918400, |
| "total_original_bytes": 29319014400, |
| "total_compressed_bytes": 21890180858, |
| "overall_ratio": 0.7466, |
| "overall_size_reduction_pct": 25.34, |
| "mlp_fraction_of_model": 0.7511 |
| }, |
| "per_layer": { |
| "0": { |
| "layernorm": { |
| "exponent_entropy": 0.2135, |
| "huffman_reduction_pct": 43.27, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.552, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.99986, |
| "verbatim_row_fraction": 0.007509 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5515, |
| "huffman_reduction_pct": 33.8, |
| "top_16_coverage": 0.999894, |
| "verbatim_row_fraction": 0.006357 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5593, |
| "huffman_reduction_pct": 33.82, |
| "top_16_coverage": 0.999814, |
| "verbatim_row_fraction": 0.009465 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.7358, |
| "huffman_reduction_pct": 32.74, |
| "top_16_coverage": 0.999614, |
| "verbatim_row_fraction": 0.024329 |
| } |
| }, |
| "1": { |
| "layernorm": { |
| "exponent_entropy": 0.1178, |
| "huffman_reduction_pct": 43.37, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5497, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999894, |
| "verbatim_row_fraction": 0.006556 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.552, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999888, |
| "verbatim_row_fraction": 0.006267 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5618, |
| "huffman_reduction_pct": 33.8, |
| "top_16_coverage": 0.999834, |
| "verbatim_row_fraction": 0.008467 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.753, |
| "huffman_reduction_pct": 32.64, |
| "top_16_coverage": 0.999822, |
| "verbatim_row_fraction": 0.011317 |
| } |
| }, |
| "2": { |
| "layernorm": { |
| "exponent_entropy": 1.0449, |
| "huffman_reduction_pct": 40.92, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5494, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999905, |
| "verbatim_row_fraction": 0.005965 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5538, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999884, |
| "verbatim_row_fraction": 0.006082 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5563, |
| "huffman_reduction_pct": 33.84, |
| "top_16_coverage": 0.99986, |
| "verbatim_row_fraction": 0.007581 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.7164, |
| "huffman_reduction_pct": 32.85, |
| "top_16_coverage": 0.999807, |
| "verbatim_row_fraction": 0.012248 |
| } |
| }, |
| "3": { |
| "layernorm": { |
| "exponent_entropy": 0.2362, |
| "huffman_reduction_pct": 43.19, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5486, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999907, |
| "verbatim_row_fraction": 0.005882 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5537, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999879, |
| "verbatim_row_fraction": 0.005997 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5828, |
| "huffman_reduction_pct": 33.68, |
| "top_16_coverage": 0.999867, |
| "verbatim_row_fraction": 0.007588 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.7166, |
| "huffman_reduction_pct": 32.86, |
| "top_16_coverage": 0.999806, |
| "verbatim_row_fraction": 0.012274 |
| } |
| }, |
| "4": { |
| "layernorm": { |
| "exponent_entropy": 0.1348, |
| "huffman_reduction_pct": 43.31, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5486, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999908, |
| "verbatim_row_fraction": 0.005854 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.554, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999888, |
| "verbatim_row_fraction": 0.006004 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5515, |
| "huffman_reduction_pct": 33.81, |
| "top_16_coverage": 0.999885, |
| "verbatim_row_fraction": 0.006636 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6618, |
| "huffman_reduction_pct": 33.16, |
| "top_16_coverage": 0.999831, |
| "verbatim_row_fraction": 0.010771 |
| } |
| }, |
| "5": { |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5549, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999905, |
| "verbatim_row_fraction": 0.005801 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5714, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999869, |
| "verbatim_row_fraction": 0.007434 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6902, |
| "huffman_reduction_pct": 33.01, |
| "top_16_coverage": 0.999827, |
| "verbatim_row_fraction": 0.010994 |
| }, |
| "layernorm": { |
| "exponent_entropy": 0.1062, |
| "huffman_reduction_pct": 43.4, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5498, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999907, |
| "verbatim_row_fraction": 0.005869 |
| } |
| }, |
| "10": { |
| "layernorm": { |
| "exponent_entropy": 0.6062, |
| "huffman_reduction_pct": 42.67, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5517, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999901, |
| "verbatim_row_fraction": 0.005961 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5631, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999904, |
| "verbatim_row_fraction": 0.005881 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5574, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999892, |
| "verbatim_row_fraction": 0.006487 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6336, |
| "huffman_reduction_pct": 33.32, |
| "top_16_coverage": 0.999843, |
| "verbatim_row_fraction": 0.009971 |
| } |
| }, |
| "11": { |
| "layernorm": { |
| "exponent_entropy": 0.8971, |
| "huffman_reduction_pct": 41.69, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5534, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.9999, |
| "verbatim_row_fraction": 0.005956 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5668, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999899, |
| "verbatim_row_fraction": 0.00614 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.566, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.99989, |
| "verbatim_row_fraction": 0.006382 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6559, |
| "huffman_reduction_pct": 33.21, |
| "top_16_coverage": 0.999837, |
| "verbatim_row_fraction": 0.010353 |
| } |
| }, |
| "12": { |
| "layernorm": { |
| "exponent_entropy": 0.4779, |
| "huffman_reduction_pct": 42.93, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5537, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999902, |
| "verbatim_row_fraction": 0.005771 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5632, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999908, |
| "verbatim_row_fraction": 0.00574 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5654, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999891, |
| "verbatim_row_fraction": 0.00636 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.678, |
| "huffman_reduction_pct": 33.09, |
| "top_16_coverage": 0.99983, |
| "verbatim_row_fraction": 0.010841 |
| } |
| }, |
| "6": { |
| "layernorm": { |
| "exponent_entropy": 0.091, |
| "huffman_reduction_pct": 43.47, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5493, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005875 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.554, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999911, |
| "verbatim_row_fraction": 0.005602 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5729, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999881, |
| "verbatim_row_fraction": 0.006934 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6846, |
| "huffman_reduction_pct": 33.05, |
| "top_16_coverage": 0.99982, |
| "verbatim_row_fraction": 0.011445 |
| } |
| }, |
| "7": { |
| "layernorm": { |
| "exponent_entropy": 0.0787, |
| "huffman_reduction_pct": 43.49, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.549, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999908, |
| "verbatim_row_fraction": 0.005763 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5542, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.99991, |
| "verbatim_row_fraction": 0.005642 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5572, |
| "huffman_reduction_pct": 33.8, |
| "top_16_coverage": 0.999891, |
| "verbatim_row_fraction": 0.006523 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.671, |
| "huffman_reduction_pct": 33.12, |
| "top_16_coverage": 0.999828, |
| "verbatim_row_fraction": 0.010926 |
| } |
| }, |
| "8": { |
| "layernorm": { |
| "exponent_entropy": 0.0721, |
| "huffman_reduction_pct": 43.5, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5489, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999909, |
| "verbatim_row_fraction": 0.005679 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5549, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999913, |
| "verbatim_row_fraction": 0.005487 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.56, |
| "huffman_reduction_pct": 33.79, |
| "top_16_coverage": 0.999884, |
| "verbatim_row_fraction": 0.006631 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6696, |
| "huffman_reduction_pct": 33.13, |
| "top_16_coverage": 0.999831, |
| "verbatim_row_fraction": 0.010763 |
| } |
| }, |
| "9": { |
| "layernorm": { |
| "exponent_entropy": 0.0511, |
| "huffman_reduction_pct": 43.57, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5497, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999903, |
| "verbatim_row_fraction": 0.006 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5588, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999907, |
| "verbatim_row_fraction": 0.00574 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5575, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999896, |
| "verbatim_row_fraction": 0.006294 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6389, |
| "huffman_reduction_pct": 33.29, |
| "top_16_coverage": 0.99984, |
| "verbatim_row_fraction": 0.010176 |
| } |
| }, |
| "13": { |
| "layernorm": { |
| "exponent_entropy": 0.3824, |
| "huffman_reduction_pct": 43.1, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.555, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999893, |
| "verbatim_row_fraction": 0.005844 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5643, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005783 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.567, |
| "huffman_reduction_pct": 33.71, |
| "top_16_coverage": 0.999886, |
| "verbatim_row_fraction": 0.006089 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6362, |
| "huffman_reduction_pct": 33.31, |
| "top_16_coverage": 0.999841, |
| "verbatim_row_fraction": 0.010117 |
| } |
| }, |
| "14": { |
| "layernorm": { |
| "exponent_entropy": 0.2836, |
| "huffman_reduction_pct": 43.25, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.555, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999896, |
| "verbatim_row_fraction": 0.005914 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5647, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999897, |
| "verbatim_row_fraction": 0.006102 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5669, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.99989, |
| "verbatim_row_fraction": 0.006069 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6555, |
| "huffman_reduction_pct": 33.21, |
| "top_16_coverage": 0.999839, |
| "verbatim_row_fraction": 0.010249 |
| } |
| }, |
| "15": { |
| "layernorm": { |
| "exponent_entropy": 0.232, |
| "huffman_reduction_pct": 43.32, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5559, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999895, |
| "verbatim_row_fraction": 0.005887 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5647, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.9999, |
| "verbatim_row_fraction": 0.006108 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5614, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999887, |
| "verbatim_row_fraction": 0.00627 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6499, |
| "huffman_reduction_pct": 33.24, |
| "top_16_coverage": 0.99984, |
| "verbatim_row_fraction": 0.010192 |
| } |
| }, |
| "16": { |
| "layernorm": { |
| "exponent_entropy": 0.1846, |
| "huffman_reduction_pct": 43.38, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5568, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999898, |
| "verbatim_row_fraction": 0.006057 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5687, |
| "huffman_reduction_pct": 33.67, |
| "top_16_coverage": 0.999886, |
| "verbatim_row_fraction": 0.006397 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5633, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999896, |
| "verbatim_row_fraction": 0.005996 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6561, |
| "huffman_reduction_pct": 33.21, |
| "top_16_coverage": 0.999833, |
| "verbatim_row_fraction": 0.010661 |
| } |
| }, |
| "17": { |
| "layernorm": { |
| "exponent_entropy": 0.0986, |
| "huffman_reduction_pct": 43.47, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5558, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999902, |
| "verbatim_row_fraction": 0.005951 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5655, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999899, |
| "verbatim_row_fraction": 0.006078 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5705, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999893, |
| "verbatim_row_fraction": 0.006248 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6704, |
| "huffman_reduction_pct": 33.13, |
| "top_16_coverage": 0.99983, |
| "verbatim_row_fraction": 0.010783 |
| } |
| }, |
| "18": { |
| "layernorm": { |
| "exponent_entropy": 0.0849, |
| "huffman_reduction_pct": 43.49, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5555, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999902, |
| "verbatim_row_fraction": 0.005965 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5643, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999899, |
| "verbatim_row_fraction": 0.00611 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5775, |
| "huffman_reduction_pct": 33.63, |
| "top_16_coverage": 0.999886, |
| "verbatim_row_fraction": 0.006226 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6571, |
| "huffman_reduction_pct": 33.21, |
| "top_16_coverage": 0.999834, |
| "verbatim_row_fraction": 0.010542 |
| } |
| }, |
| "19": { |
| "layernorm": { |
| "exponent_entropy": 0.0768, |
| "huffman_reduction_pct": 43.49, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5544, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005751 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5624, |
| "huffman_reduction_pct": 33.71, |
| "top_16_coverage": 0.999904, |
| "verbatim_row_fraction": 0.005974 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5636, |
| "huffman_reduction_pct": 33.71, |
| "top_16_coverage": 0.99989, |
| "verbatim_row_fraction": 0.005742 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6313, |
| "huffman_reduction_pct": 33.35, |
| "top_16_coverage": 0.999839, |
| "verbatim_row_fraction": 0.01027 |
| } |
| }, |
| "20": { |
| "attn_o": { |
| "exponent_entropy": 2.5616, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999903, |
| "verbatim_row_fraction": 0.005554 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6148, |
| "huffman_reduction_pct": 33.43, |
| "top_16_coverage": 0.999856, |
| "verbatim_row_fraction": 0.008743 |
| }, |
| "layernorm": { |
| "exponent_entropy": 0.0566, |
| "huffman_reduction_pct": 43.51, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.555, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999911, |
| "verbatim_row_fraction": 0.005604 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5639, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005842 |
| } |
| }, |
| "21": { |
| "layernorm": { |
| "exponent_entropy": 0.0542, |
| "huffman_reduction_pct": 43.51, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.554, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999908, |
| "verbatim_row_fraction": 0.005744 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5634, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005859 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5683, |
| "huffman_reduction_pct": 33.66, |
| "top_16_coverage": 0.9999, |
| "verbatim_row_fraction": 0.005713 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6277, |
| "huffman_reduction_pct": 33.37, |
| "top_16_coverage": 0.999843, |
| "verbatim_row_fraction": 0.009678 |
| } |
| }, |
| "22": { |
| "layernorm": { |
| "exponent_entropy": 0.0464, |
| "huffman_reduction_pct": 43.52, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5537, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.99991, |
| "verbatim_row_fraction": 0.005663 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5635, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999907, |
| "verbatim_row_fraction": 0.005871 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5712, |
| "huffman_reduction_pct": 33.64, |
| "top_16_coverage": 0.999899, |
| "verbatim_row_fraction": 0.005764 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6357, |
| "huffman_reduction_pct": 33.33, |
| "top_16_coverage": 0.999849, |
| "verbatim_row_fraction": 0.009336 |
| } |
| }, |
| "23": { |
| "layernorm": { |
| "exponent_entropy": 0.0458, |
| "huffman_reduction_pct": 43.52, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.553, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999912, |
| "verbatim_row_fraction": 0.005522 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5626, |
| "huffman_reduction_pct": 33.71, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005823 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.558, |
| "huffman_reduction_pct": 33.71, |
| "top_16_coverage": 0.999896, |
| "verbatim_row_fraction": 0.005642 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6116, |
| "huffman_reduction_pct": 33.46, |
| "top_16_coverage": 0.999856, |
| "verbatim_row_fraction": 0.008739 |
| } |
| }, |
| "24": { |
| "layernorm": { |
| "exponent_entropy": 0.0458, |
| "huffman_reduction_pct": 43.52, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5523, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999914, |
| "verbatim_row_fraction": 0.005425 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5616, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999911, |
| "verbatim_row_fraction": 0.005653 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.559, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005579 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6255, |
| "huffman_reduction_pct": 33.39, |
| "top_16_coverage": 0.999859, |
| "verbatim_row_fraction": 0.008685 |
| } |
| }, |
| "25": { |
| "layernorm": { |
| "exponent_entropy": 0.0426, |
| "huffman_reduction_pct": 43.52, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5521, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.999911, |
| "verbatim_row_fraction": 0.005604 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5618, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999912, |
| "verbatim_row_fraction": 0.005615 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5647, |
| "huffman_reduction_pct": 33.7, |
| "top_16_coverage": 0.999889, |
| "verbatim_row_fraction": 0.005696 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6131, |
| "huffman_reduction_pct": 33.45, |
| "top_16_coverage": 0.999873, |
| "verbatim_row_fraction": 0.007912 |
| } |
| }, |
| "26": { |
| "layernorm": { |
| "exponent_entropy": 0.0387, |
| "huffman_reduction_pct": 43.53, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5512, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999913, |
| "verbatim_row_fraction": 0.005522 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5608, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999912, |
| "verbatim_row_fraction": 0.005592 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.558, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999906, |
| "verbatim_row_fraction": 0.005356 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6189, |
| "huffman_reduction_pct": 33.43, |
| "top_16_coverage": 0.999875, |
| "verbatim_row_fraction": 0.007861 |
| } |
| }, |
| "27": { |
| "attn_o": { |
| "exponent_entropy": 2.5594, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999898, |
| "verbatim_row_fraction": 0.005244 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6183, |
| "huffman_reduction_pct": 33.43, |
| "top_16_coverage": 0.999874, |
| "verbatim_row_fraction": 0.007879 |
| }, |
| "layernorm": { |
| "exponent_entropy": 0.0377, |
| "huffman_reduction_pct": 43.53, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5496, |
| "huffman_reduction_pct": 33.78, |
| "top_16_coverage": 0.999917, |
| "verbatim_row_fraction": 0.005278 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.557, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999917, |
| "verbatim_row_fraction": 0.005327 |
| } |
| }, |
| "28": { |
| "layernorm": { |
| "exponent_entropy": 0.0404, |
| "huffman_reduction_pct": 43.47, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5488, |
| "huffman_reduction_pct": 33.78, |
| "top_16_coverage": 0.999918, |
| "verbatim_row_fraction": 0.005253 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5554, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999916, |
| "verbatim_row_fraction": 0.00534 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5585, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.999902, |
| "verbatim_row_fraction": 0.005117 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6374, |
| "huffman_reduction_pct": 33.33, |
| "top_16_coverage": 0.999871, |
| "verbatim_row_fraction": 0.008117 |
| } |
| }, |
| "29": { |
| "layernorm": { |
| "exponent_entropy": 0.1296, |
| "huffman_reduction_pct": 43.38, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.548, |
| "huffman_reduction_pct": 33.79, |
| "top_16_coverage": 0.999915, |
| "verbatim_row_fraction": 0.005398 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5536, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999919, |
| "verbatim_row_fraction": 0.005179 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5576, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999883, |
| "verbatim_row_fraction": 0.005383 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.595, |
| "huffman_reduction_pct": 33.55, |
| "top_16_coverage": 0.999878, |
| "verbatim_row_fraction": 0.007557 |
| } |
| }, |
| "30": { |
| "layernorm": { |
| "exponent_entropy": 0.4956, |
| "huffman_reduction_pct": 42.85, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5477, |
| "huffman_reduction_pct": 33.79, |
| "top_16_coverage": 0.999917, |
| "verbatim_row_fraction": 0.005279 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5528, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999918, |
| "verbatim_row_fraction": 0.005238 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5567, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999899, |
| "verbatim_row_fraction": 0.005417 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.5895, |
| "huffman_reduction_pct": 33.58, |
| "top_16_coverage": 0.999884, |
| "verbatim_row_fraction": 0.007244 |
| } |
| }, |
| "31": { |
| "layernorm": { |
| "exponent_entropy": 0.1291, |
| "huffman_reduction_pct": 43.39, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5476, |
| "huffman_reduction_pct": 33.79, |
| "top_16_coverage": 0.999917, |
| "verbatim_row_fraction": 0.005256 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.552, |
| "huffman_reduction_pct": 33.78, |
| "top_16_coverage": 0.99992, |
| "verbatim_row_fraction": 0.005106 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5602, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999885, |
| "verbatim_row_fraction": 0.005356 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6135, |
| "huffman_reduction_pct": 33.45, |
| "top_16_coverage": 0.999875, |
| "verbatim_row_fraction": 0.007858 |
| } |
| }, |
| "32": { |
| "layernorm": { |
| "exponent_entropy": 0.0873, |
| "huffman_reduction_pct": 43.43, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5481, |
| "huffman_reduction_pct": 33.79, |
| "top_16_coverage": 0.999919, |
| "verbatim_row_fraction": 0.00516 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5517, |
| "huffman_reduction_pct": 33.78, |
| "top_16_coverage": 0.999919, |
| "verbatim_row_fraction": 0.005182 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5638, |
| "huffman_reduction_pct": 33.68, |
| "top_16_coverage": 0.999888, |
| "verbatim_row_fraction": 0.005012 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6423, |
| "huffman_reduction_pct": 33.29, |
| "top_16_coverage": 0.999847, |
| "verbatim_row_fraction": 0.009709 |
| } |
| }, |
| "33": { |
| "layernorm": { |
| "exponent_entropy": 0.0779, |
| "huffman_reduction_pct": 43.39, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5477, |
| "huffman_reduction_pct": 33.8, |
| "top_16_coverage": 0.999919, |
| "verbatim_row_fraction": 0.005173 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5521, |
| "huffman_reduction_pct": 33.78, |
| "top_16_coverage": 0.999921, |
| "verbatim_row_fraction": 0.005022 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.561, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999872, |
| "verbatim_row_fraction": 0.005449 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.5956, |
| "huffman_reduction_pct": 33.54, |
| "top_16_coverage": 0.999876, |
| "verbatim_row_fraction": 0.007881 |
| } |
| }, |
| "34": { |
| "attn_o": { |
| "exponent_entropy": 2.5607, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999884, |
| "verbatim_row_fraction": 0.005105 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6061, |
| "huffman_reduction_pct": 33.49, |
| "top_16_coverage": 0.999876, |
| "verbatim_row_fraction": 0.007858 |
| }, |
| "layernorm": { |
| "exponent_entropy": 0.0739, |
| "huffman_reduction_pct": 43.44, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5491, |
| "huffman_reduction_pct": 33.79, |
| "top_16_coverage": 0.999917, |
| "verbatim_row_fraction": 0.005278 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5527, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.99992, |
| "verbatim_row_fraction": 0.005075 |
| } |
| }, |
| "35": { |
| "layernorm": { |
| "exponent_entropy": 0.0702, |
| "huffman_reduction_pct": 43.45, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5521, |
| "huffman_reduction_pct": 33.77, |
| "top_16_coverage": 0.999914, |
| "verbatim_row_fraction": 0.005479 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.555, |
| "huffman_reduction_pct": 33.76, |
| "top_16_coverage": 0.99992, |
| "verbatim_row_fraction": 0.005137 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5677, |
| "huffman_reduction_pct": 33.67, |
| "top_16_coverage": 0.999883, |
| "verbatim_row_fraction": 0.005298 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.6091, |
| "huffman_reduction_pct": 33.47, |
| "top_16_coverage": 0.999864, |
| "verbatim_row_fraction": 0.008477 |
| } |
| }, |
| "36": { |
| "layernorm": { |
| "exponent_entropy": 0.0673, |
| "huffman_reduction_pct": 43.45, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5588, |
| "huffman_reduction_pct": 33.74, |
| "top_16_coverage": 0.99991, |
| "verbatim_row_fraction": 0.005687 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5575, |
| "huffman_reduction_pct": 33.75, |
| "top_16_coverage": 0.999918, |
| "verbatim_row_fraction": 0.005209 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5618, |
| "huffman_reduction_pct": 33.72, |
| "top_16_coverage": 0.999866, |
| "verbatim_row_fraction": 0.005564 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.5974, |
| "huffman_reduction_pct": 33.53, |
| "top_16_coverage": 0.999881, |
| "verbatim_row_fraction": 0.007572 |
| } |
| }, |
| "37": { |
| "layernorm": { |
| "exponent_entropy": 0.0628, |
| "huffman_reduction_pct": 43.5, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5679, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999905, |
| "verbatim_row_fraction": 0.005956 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5605, |
| "huffman_reduction_pct": 33.73, |
| "top_16_coverage": 0.999917, |
| "verbatim_row_fraction": 0.005285 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5689, |
| "huffman_reduction_pct": 33.68, |
| "top_16_coverage": 0.999827, |
| "verbatim_row_fraction": 0.009551 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.5966, |
| "huffman_reduction_pct": 33.54, |
| "top_16_coverage": 0.999888, |
| "verbatim_row_fraction": 0.007118 |
| } |
| }, |
| "38": { |
| "layernorm": { |
| "exponent_entropy": 0.0696, |
| "huffman_reduction_pct": 43.56, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5838, |
| "huffman_reduction_pct": 33.6, |
| "top_16_coverage": 0.999841, |
| "verbatim_row_fraction": 0.007299 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5633, |
| "huffman_reduction_pct": 33.71, |
| "top_16_coverage": 0.999913, |
| "verbatim_row_fraction": 0.005528 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5688, |
| "huffman_reduction_pct": 33.68, |
| "top_16_coverage": 0.999803, |
| "verbatim_row_fraction": 0.010249 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.5797, |
| "huffman_reduction_pct": 33.62, |
| "top_16_coverage": 0.999892, |
| "verbatim_row_fraction": 0.0068 |
| } |
| }, |
| "39": { |
| "layernorm": { |
| "exponent_entropy": 0.0895, |
| "huffman_reduction_pct": 43.36, |
| "top_16_coverage": 1.0, |
| "verbatim_row_fraction": 0.0 |
| }, |
| "mlp_down": { |
| "exponent_entropy": 2.5843, |
| "huffman_reduction_pct": 33.62, |
| "top_16_coverage": 0.999858, |
| "verbatim_row_fraction": 0.007856 |
| }, |
| "mlp_gate_up_fused": { |
| "exponent_entropy": 2.5625, |
| "huffman_reduction_pct": 33.69, |
| "top_16_coverage": 0.999903, |
| "verbatim_row_fraction": 0.00583 |
| }, |
| "attn_o": { |
| "exponent_entropy": 2.5694, |
| "huffman_reduction_pct": 33.65, |
| "top_16_coverage": 0.999697, |
| "verbatim_row_fraction": 0.011592 |
| }, |
| "attn_qkv_fused": { |
| "exponent_entropy": 2.5787, |
| "huffman_reduction_pct": 33.61, |
| "top_16_coverage": 0.999876, |
| "verbatim_row_fraction": 0.007645 |
| } |
| } |
| }, |
| "tensor_details": [ |
| { |
| "name": "model.embed_tokens.weight", |
| "tensor_type": "embedding", |
| "category": "embedding", |
| "layer_index": -1, |
| "shape": [ |
| 100352, |
| 5120 |
| ], |
| "n_elements": 513802240, |
| "original_bytes": 1027604480, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5824, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9723, |
| "unique_exponents": 36, |
| "top_8_exponent_coverage": 0.985829, |
| "top_16_exponent_coverage": 0.999852, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 1346136570, |
| "huffman_bits_per_exponent": 2.62, |
| "huffman_full_ratio": 0.6637, |
| "huffman_size_reduction_pct": 33.63, |
| "palette_element_coverage": 0.999852, |
| "palette_row_fraction": 0.992516, |
| "verbatim_row_fraction": 0.007484, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7528, |
| "palette_size_reduction_pct": 24.72 |
| }, |
| { |
| "name": "model.layers.0.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 0, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 1.2695, |
| "sign_entropy_bits": 0.0092, |
| "mantissa_entropy_bits": 6.8703, |
| "unique_exponents": 14, |
| "top_8_exponent_coverage": 0.99707, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 7501, |
| "huffman_bits_per_exponent": 1.465, |
| "huffman_full_ratio": 0.5984, |
| "huffman_size_reduction_pct": 40.16, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.0.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 0, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.552, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9716, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988088, |
| "top_16_exponent_coverage": 0.99986, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238258009, |
| "huffman_bits_per_exponent": 2.5968, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.99986, |
| "palette_row_fraction": 0.992491, |
| "verbatim_row_fraction": 0.007509, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.0.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 0, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5515, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9722, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.987798, |
| "top_16_exponent_coverage": 0.999894, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 475589270, |
| "huffman_bits_per_exponent": 2.5918, |
| "huffman_full_ratio": 0.662, |
| "huffman_size_reduction_pct": 33.8, |
| "palette_element_coverage": 0.999894, |
| "palette_row_fraction": 0.993643, |
| "verbatim_row_fraction": 0.006357, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.0.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 0, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.2135, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9491, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5316, |
| "huffman_bits_per_exponent": 1.0383, |
| "huffman_full_ratio": 0.5673, |
| "huffman_size_reduction_pct": 43.27, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.0.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 0, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5593, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.972, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.985205, |
| "top_16_exponent_coverage": 0.999814, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 67848591, |
| "huffman_bits_per_exponent": 2.5882, |
| "huffman_full_ratio": 0.6618, |
| "huffman_size_reduction_pct": 33.82, |
| "palette_element_coverage": 0.999814, |
| "palette_row_fraction": 0.990535, |
| "verbatim_row_fraction": 0.009465, |
| "palette_exponent_ratio": 0.5067, |
| "palette_full_ratio": 0.7533, |
| "palette_size_reduction_pct": 24.67 |
| }, |
| { |
| "name": "model.layers.0.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 0, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.7358, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9723, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.974422, |
| "top_16_exponent_coverage": 0.999614, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 108589785, |
| "huffman_bits_per_exponent": 2.7616, |
| "huffman_full_ratio": 0.6726, |
| "huffman_size_reduction_pct": 32.74, |
| "palette_element_coverage": 0.999614, |
| "palette_row_fraction": 0.975671, |
| "verbatim_row_fraction": 0.024329, |
| "palette_exponent_ratio": 0.5141, |
| "palette_full_ratio": 0.7571, |
| "palette_size_reduction_pct": 24.29 |
| }, |
| { |
| "name": "model.layers.1.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 1, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.8064, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 6.9293, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 6221, |
| "huffman_bits_per_exponent": 1.215, |
| "huffman_full_ratio": 0.5779, |
| "huffman_size_reduction_pct": 42.21, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.1.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 1, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5497, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9713, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988343, |
| "top_16_exponent_coverage": 0.999894, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238235092, |
| "huffman_bits_per_exponent": 2.5966, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999894, |
| "palette_row_fraction": 0.993444, |
| "verbatim_row_fraction": 0.006556, |
| "palette_exponent_ratio": 0.5052, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.1.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 1, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.552, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9717, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.988215, |
| "top_16_exponent_coverage": 0.999888, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476919017, |
| "huffman_bits_per_exponent": 2.599, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999888, |
| "palette_row_fraction": 0.993733, |
| "verbatim_row_fraction": 0.006267, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.1.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 1, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1178, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.4424, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5228, |
| "huffman_bits_per_exponent": 1.0211, |
| "huffman_full_ratio": 0.5663, |
| "huffman_size_reduction_pct": 43.37, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.1.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 1, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5618, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9717, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.985438, |
| "top_16_exponent_coverage": 0.999834, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 67954545, |
| "huffman_bits_per_exponent": 2.5923, |
| "huffman_full_ratio": 0.662, |
| "huffman_size_reduction_pct": 33.8, |
| "palette_element_coverage": 0.999834, |
| "palette_row_fraction": 0.991533, |
| "verbatim_row_fraction": 0.008467, |
| "palette_exponent_ratio": 0.5062, |
| "palette_full_ratio": 0.7531, |
| "palette_size_reduction_pct": 24.69 |
| }, |
| { |
| "name": "model.layers.1.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 1, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.753, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9721, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.976471, |
| "top_16_exponent_coverage": 0.999822, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 109237443, |
| "huffman_bits_per_exponent": 2.7781, |
| "huffman_full_ratio": 0.6736, |
| "huffman_size_reduction_pct": 32.64, |
| "palette_element_coverage": 0.999822, |
| "palette_row_fraction": 0.988683, |
| "verbatim_row_fraction": 0.011317, |
| "palette_exponent_ratio": 0.5076, |
| "palette_full_ratio": 0.7538, |
| "palette_size_reduction_pct": 24.62 |
| }, |
| { |
| "name": "model.layers.2.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 2, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.143, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 6.2633, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5231, |
| "huffman_bits_per_exponent": 1.0217, |
| "huffman_full_ratio": 0.5668, |
| "huffman_size_reduction_pct": 43.32, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.2.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 2, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5494, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9712, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.988447, |
| "top_16_exponent_coverage": 0.999905, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238373341, |
| "huffman_bits_per_exponent": 2.5981, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999905, |
| "palette_row_fraction": 0.994035, |
| "verbatim_row_fraction": 0.005965, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.2.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 2, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5538, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9715, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988277, |
| "top_16_exponent_coverage": 0.999884, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477555064, |
| "huffman_bits_per_exponent": 2.6025, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999884, |
| "palette_row_fraction": 0.993918, |
| "verbatim_row_fraction": 0.006082, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.2.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 2, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 1.0449, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9569, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 7200, |
| "huffman_bits_per_exponent": 1.4062, |
| "huffman_full_ratio": 0.5908, |
| "huffman_size_reduction_pct": 40.92, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.2.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 2, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5563, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9718, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.986088, |
| "top_16_exponent_coverage": 0.99986, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 67798605, |
| "huffman_bits_per_exponent": 2.5863, |
| "huffman_full_ratio": 0.6616, |
| "huffman_size_reduction_pct": 33.84, |
| "palette_element_coverage": 0.99986, |
| "palette_row_fraction": 0.992419, |
| "verbatim_row_fraction": 0.007581, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.2.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 2, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.7164, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.972, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.977699, |
| "top_16_exponent_coverage": 0.999807, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 107901369, |
| "huffman_bits_per_exponent": 2.7441, |
| "huffman_full_ratio": 0.6715, |
| "huffman_size_reduction_pct": 32.85, |
| "palette_element_coverage": 0.999807, |
| "palette_row_fraction": 0.987752, |
| "verbatim_row_fraction": 0.012248, |
| "palette_exponent_ratio": 0.5081, |
| "palette_full_ratio": 0.754, |
| "palette_size_reduction_pct": 24.6 |
| }, |
| { |
| "name": "model.layers.3.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 3, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 1.0102, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 6.6358, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 7202, |
| "huffman_bits_per_exponent": 1.4066, |
| "huffman_full_ratio": 0.5908, |
| "huffman_size_reduction_pct": 40.92, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.3.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 3, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5486, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9711, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.988465, |
| "top_16_exponent_coverage": 0.999907, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238241309, |
| "huffman_bits_per_exponent": 2.5966, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999907, |
| "palette_row_fraction": 0.994118, |
| "verbatim_row_fraction": 0.005882, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.3.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 3, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5537, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9713, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.988275, |
| "top_16_exponent_coverage": 0.999879, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477301285, |
| "huffman_bits_per_exponent": 2.6011, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999879, |
| "palette_row_fraction": 0.994003, |
| "verbatim_row_fraction": 0.005997, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.3.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 3, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.2362, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5898, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5340, |
| "huffman_bits_per_exponent": 1.043, |
| "huffman_full_ratio": 0.5681, |
| "huffman_size_reduction_pct": 43.19, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.3.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 3, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5828, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9717, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.985429, |
| "top_16_exponent_coverage": 0.999867, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68469043, |
| "huffman_bits_per_exponent": 2.6119, |
| "huffman_full_ratio": 0.6632, |
| "huffman_size_reduction_pct": 33.68, |
| "palette_element_coverage": 0.999867, |
| "palette_row_fraction": 0.992412, |
| "verbatim_row_fraction": 0.007588, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.3.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 3, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.7166, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9719, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.976779, |
| "top_16_exponent_coverage": 0.999806, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 107805165, |
| "huffman_bits_per_exponent": 2.7416, |
| "huffman_full_ratio": 0.6714, |
| "huffman_size_reduction_pct": 32.86, |
| "palette_element_coverage": 0.999806, |
| "palette_row_fraction": 0.987726, |
| "verbatim_row_fraction": 0.012274, |
| "palette_exponent_ratio": 0.5081, |
| "palette_full_ratio": 0.754, |
| "palette_size_reduction_pct": 24.6 |
| }, |
| { |
| "name": "model.layers.4.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 4, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.2896, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.7088, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5378, |
| "huffman_bits_per_exponent": 1.0504, |
| "huffman_full_ratio": 0.5681, |
| "huffman_size_reduction_pct": 43.19, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.4.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 4, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5486, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9709, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988485, |
| "top_16_exponent_coverage": 0.999908, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238309448, |
| "huffman_bits_per_exponent": 2.5974, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999908, |
| "palette_row_fraction": 0.994146, |
| "verbatim_row_fraction": 0.005854, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.4.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 4, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.554, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9712, |
| "unique_exponents": 36, |
| "top_8_exponent_coverage": 0.988291, |
| "top_16_exponent_coverage": 0.999888, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477274997, |
| "huffman_bits_per_exponent": 2.6009, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999888, |
| "palette_row_fraction": 0.993996, |
| "verbatim_row_fraction": 0.006004, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.4.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 4, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1348, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5233, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5240, |
| "huffman_bits_per_exponent": 1.0234, |
| "huffman_full_ratio": 0.5669, |
| "huffman_size_reduction_pct": 43.31, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.4.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 4, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5515, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9714, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987296, |
| "top_16_exponent_coverage": 0.999885, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 67905296, |
| "huffman_bits_per_exponent": 2.5904, |
| "huffman_full_ratio": 0.6619, |
| "huffman_size_reduction_pct": 33.81, |
| "palette_element_coverage": 0.999885, |
| "palette_row_fraction": 0.993364, |
| "verbatim_row_fraction": 0.006636, |
| "palette_exponent_ratio": 0.5053, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.4.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 4, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6618, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9717, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.979777, |
| "top_16_exponent_coverage": 0.999831, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105963138, |
| "huffman_bits_per_exponent": 2.6948, |
| "huffman_full_ratio": 0.6684, |
| "huffman_size_reduction_pct": 33.16, |
| "palette_element_coverage": 0.999831, |
| "palette_row_fraction": 0.989229, |
| "verbatim_row_fraction": 0.010771, |
| "palette_exponent_ratio": 0.5073, |
| "palette_full_ratio": 0.7537, |
| "palette_size_reduction_pct": 24.63 |
| }, |
| { |
| "name": "model.layers.5.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 5, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5549, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.971, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988264, |
| "top_16_exponent_coverage": 0.999905, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477360622, |
| "huffman_bits_per_exponent": 2.6014, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999905, |
| "palette_row_fraction": 0.994199, |
| "verbatim_row_fraction": 0.005801, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.5.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 5, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5714, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9715, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.986226, |
| "top_16_exponent_coverage": 0.999869, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68194058, |
| "huffman_bits_per_exponent": 2.6014, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999869, |
| "palette_row_fraction": 0.992566, |
| "verbatim_row_fraction": 0.007434, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7528, |
| "palette_size_reduction_pct": 24.72 |
| }, |
| { |
| "name": "model.layers.5.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 5, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6902, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9719, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.978466, |
| "top_16_exponent_coverage": 0.999827, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 106888285, |
| "huffman_bits_per_exponent": 2.7183, |
| "huffman_full_ratio": 0.6699, |
| "huffman_size_reduction_pct": 33.01, |
| "palette_element_coverage": 0.999827, |
| "palette_row_fraction": 0.989006, |
| "verbatim_row_fraction": 0.010994, |
| "palette_exponent_ratio": 0.5075, |
| "palette_full_ratio": 0.7537, |
| "palette_size_reduction_pct": 24.63 |
| }, |
| { |
| "name": "model.layers.10.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 10, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0512, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9019, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5156, |
| "huffman_bits_per_exponent": 1.007, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.10.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 10, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5517, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9696, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988377, |
| "top_16_exponent_coverage": 0.999901, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238381487, |
| "huffman_bits_per_exponent": 2.5982, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999901, |
| "palette_row_fraction": 0.994039, |
| "verbatim_row_fraction": 0.005961, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.10.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 10, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5631, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.97, |
| "unique_exponents": 36, |
| "top_8_exponent_coverage": 0.987591, |
| "top_16_exponent_coverage": 0.999904, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478463329, |
| "huffman_bits_per_exponent": 2.6074, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999904, |
| "palette_row_fraction": 0.994119, |
| "verbatim_row_fraction": 0.005881, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.10.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 10, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.6062, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9984, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5844, |
| "huffman_bits_per_exponent": 1.1414, |
| "huffman_full_ratio": 0.5733, |
| "huffman_size_reduction_pct": 42.67, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.10.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 10, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5574, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9698, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.987346, |
| "top_16_exponent_coverage": 0.999892, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68061368, |
| "huffman_bits_per_exponent": 2.5963, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999892, |
| "palette_row_fraction": 0.993513, |
| "verbatim_row_fraction": 0.006487, |
| "palette_exponent_ratio": 0.5052, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.10.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 10, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6336, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.971, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.98017, |
| "top_16_exponent_coverage": 0.999843, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104946906, |
| "huffman_bits_per_exponent": 2.6689, |
| "huffman_full_ratio": 0.6668, |
| "huffman_size_reduction_pct": 33.32, |
| "palette_element_coverage": 0.999843, |
| "palette_row_fraction": 0.990029, |
| "verbatim_row_fraction": 0.009971, |
| "palette_exponent_ratio": 0.5069, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.11.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 11, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0502, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.0541, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5155, |
| "huffman_bits_per_exponent": 1.0068, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.11.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 11, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5534, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9691, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988349, |
| "top_16_exponent_coverage": 0.9999, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238549154, |
| "huffman_bits_per_exponent": 2.6, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.9999, |
| "palette_row_fraction": 0.994044, |
| "verbatim_row_fraction": 0.005956, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.11.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 11, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5668, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9696, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987346, |
| "top_16_exponent_coverage": 0.999899, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478997069, |
| "huffman_bits_per_exponent": 2.6103, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999899, |
| "palette_row_fraction": 0.99386, |
| "verbatim_row_fraction": 0.00614, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.11.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 11, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.8971, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5957, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 6644, |
| "huffman_bits_per_exponent": 1.2977, |
| "huffman_full_ratio": 0.5831, |
| "huffman_size_reduction_pct": 41.69, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.11.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 11, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.566, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9688, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.987215, |
| "top_16_exponent_coverage": 0.99989, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68258493, |
| "huffman_bits_per_exponent": 2.6039, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.99989, |
| "palette_row_fraction": 0.993618, |
| "verbatim_row_fraction": 0.006382, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.11.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 11, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6559, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9705, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.979295, |
| "top_16_exponent_coverage": 0.999837, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105646624, |
| "huffman_bits_per_exponent": 2.6867, |
| "huffman_full_ratio": 0.6679, |
| "huffman_size_reduction_pct": 33.21, |
| "palette_element_coverage": 0.999837, |
| "palette_row_fraction": 0.989647, |
| "verbatim_row_fraction": 0.010353, |
| "palette_exponent_ratio": 0.5071, |
| "palette_full_ratio": 0.7536, |
| "palette_size_reduction_pct": 24.64 |
| }, |
| { |
| "name": "model.layers.12.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 12, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0516, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.3792, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5155, |
| "huffman_bits_per_exponent": 1.0068, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.12.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 12, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5537, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.969, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988363, |
| "top_16_exponent_coverage": 0.999902, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238684202, |
| "huffman_bits_per_exponent": 2.6015, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999902, |
| "palette_row_fraction": 0.994229, |
| "verbatim_row_fraction": 0.005771, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.12.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 12, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5632, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9694, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.987701, |
| "top_16_exponent_coverage": 0.999908, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478447996, |
| "huffman_bits_per_exponent": 2.6073, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999908, |
| "palette_row_fraction": 0.99426, |
| "verbatim_row_fraction": 0.00574, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.12.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 12, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.4779, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5579, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5629, |
| "huffman_bits_per_exponent": 1.0994, |
| "huffman_full_ratio": 0.5707, |
| "huffman_size_reduction_pct": 42.93, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.12.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 12, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5654, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9685, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.986993, |
| "top_16_exponent_coverage": 0.999891, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68230490, |
| "huffman_bits_per_exponent": 2.6028, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999891, |
| "palette_row_fraction": 0.99364, |
| "verbatim_row_fraction": 0.00636, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.12.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 12, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.678, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9702, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.97815, |
| "top_16_exponent_coverage": 0.99983, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 106391038, |
| "huffman_bits_per_exponent": 2.7057, |
| "huffman_full_ratio": 0.6691, |
| "huffman_size_reduction_pct": 33.09, |
| "palette_element_coverage": 0.99983, |
| "palette_row_fraction": 0.989159, |
| "verbatim_row_fraction": 0.010841, |
| "palette_exponent_ratio": 0.5074, |
| "palette_full_ratio": 0.7537, |
| "palette_size_reduction_pct": 24.63 |
| }, |
| { |
| "name": "model.layers.5.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 5, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.8237, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 6.276, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 6367, |
| "huffman_bits_per_exponent": 1.2436, |
| "huffman_full_ratio": 0.5802, |
| "huffman_size_reduction_pct": 41.98, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.5.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 5, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5498, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9708, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988507, |
| "top_16_exponent_coverage": 0.999907, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238476067, |
| "huffman_bits_per_exponent": 2.5992, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999907, |
| "palette_row_fraction": 0.994131, |
| "verbatim_row_fraction": 0.005869, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.5.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 5, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1062, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.6397, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5206, |
| "huffman_bits_per_exponent": 1.0168, |
| "huffman_full_ratio": 0.566, |
| "huffman_size_reduction_pct": 43.4, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.6.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 6, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.8057, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.9805, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 6305, |
| "huffman_bits_per_exponent": 1.2314, |
| "huffman_full_ratio": 0.5794, |
| "huffman_size_reduction_pct": 42.06, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.6.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 6, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5493, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9706, |
| "unique_exponents": 36, |
| "top_8_exponent_coverage": 0.98851, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238476281, |
| "huffman_bits_per_exponent": 2.5992, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994125, |
| "verbatim_row_fraction": 0.005875, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.6.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 6, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.554, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9709, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.9883, |
| "top_16_exponent_coverage": 0.999911, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477105330, |
| "huffman_bits_per_exponent": 2.6, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999911, |
| "palette_row_fraction": 0.994398, |
| "verbatim_row_fraction": 0.005602, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.6.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 6, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.091, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.7401, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5186, |
| "huffman_bits_per_exponent": 1.0129, |
| "huffman_full_ratio": 0.5653, |
| "huffman_size_reduction_pct": 43.47, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.6.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 6, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5729, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9714, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.986305, |
| "top_16_exponent_coverage": 0.999881, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68255105, |
| "huffman_bits_per_exponent": 2.6037, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999881, |
| "palette_row_fraction": 0.993066, |
| "verbatim_row_fraction": 0.006934, |
| "palette_exponent_ratio": 0.5054, |
| "palette_full_ratio": 0.7527, |
| "palette_size_reduction_pct": 24.73 |
| }, |
| { |
| "name": "model.layers.6.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 6, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6846, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9718, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.977827, |
| "top_16_exponent_coverage": 0.99982, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 106635310, |
| "huffman_bits_per_exponent": 2.7119, |
| "huffman_full_ratio": 0.6695, |
| "huffman_size_reduction_pct": 33.05, |
| "palette_element_coverage": 0.99982, |
| "palette_row_fraction": 0.988555, |
| "verbatim_row_fraction": 0.011445, |
| "palette_exponent_ratio": 0.5077, |
| "palette_full_ratio": 0.7538, |
| "palette_size_reduction_pct": 24.62 |
| }, |
| { |
| "name": "model.layers.7.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 7, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1033, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.18, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5199, |
| "huffman_bits_per_exponent": 1.0154, |
| "huffman_full_ratio": 0.5659, |
| "huffman_size_reduction_pct": 43.41, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.7.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 7, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.549, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9703, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988519, |
| "top_16_exponent_coverage": 0.999908, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238434350, |
| "huffman_bits_per_exponent": 2.5987, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999908, |
| "palette_row_fraction": 0.994237, |
| "verbatim_row_fraction": 0.005763, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.7.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 7, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5542, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9706, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988286, |
| "top_16_exponent_coverage": 0.99991, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477051072, |
| "huffman_bits_per_exponent": 2.5997, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.99991, |
| "palette_row_fraction": 0.994358, |
| "verbatim_row_fraction": 0.005642, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.7.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 7, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0787, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.8207, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5176, |
| "huffman_bits_per_exponent": 1.0109, |
| "huffman_full_ratio": 0.5651, |
| "huffman_size_reduction_pct": 43.49, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.7.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 7, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5572, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9712, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.986907, |
| "top_16_exponent_coverage": 0.999891, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 67927442, |
| "huffman_bits_per_exponent": 2.5912, |
| "huffman_full_ratio": 0.662, |
| "huffman_size_reduction_pct": 33.8, |
| "palette_element_coverage": 0.999891, |
| "palette_row_fraction": 0.993477, |
| "verbatim_row_fraction": 0.006523, |
| "palette_exponent_ratio": 0.5052, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.7.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 7, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.671, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9718, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.978855, |
| "top_16_exponent_coverage": 0.999828, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 106181668, |
| "huffman_bits_per_exponent": 2.7003, |
| "huffman_full_ratio": 0.6688, |
| "huffman_size_reduction_pct": 33.12, |
| "palette_element_coverage": 0.999828, |
| "palette_row_fraction": 0.989074, |
| "verbatim_row_fraction": 0.010926, |
| "palette_exponent_ratio": 0.5074, |
| "palette_full_ratio": 0.7537, |
| "palette_size_reduction_pct": 24.63 |
| }, |
| { |
| "name": "model.layers.8.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 8, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0869, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.3302, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5191, |
| "huffman_bits_per_exponent": 1.0139, |
| "huffman_full_ratio": 0.5658, |
| "huffman_size_reduction_pct": 43.42, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.8.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 8, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5489, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9701, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.98854, |
| "top_16_exponent_coverage": 0.999909, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238401193, |
| "huffman_bits_per_exponent": 2.5984, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999909, |
| "palette_row_fraction": 0.994321, |
| "verbatim_row_fraction": 0.005679, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.8.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 8, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5549, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9703, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.988242, |
| "top_16_exponent_coverage": 0.999913, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477107300, |
| "huffman_bits_per_exponent": 2.6, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999913, |
| "palette_row_fraction": 0.994513, |
| "verbatim_row_fraction": 0.005487, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.8.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 8, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0721, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9401, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5168, |
| "huffman_bits_per_exponent": 1.0094, |
| "huffman_full_ratio": 0.565, |
| "huffman_size_reduction_pct": 43.5, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.8.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 8, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.56, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.971, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.986785, |
| "top_16_exponent_coverage": 0.999884, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 67971582, |
| "huffman_bits_per_exponent": 2.5929, |
| "huffman_full_ratio": 0.6621, |
| "huffman_size_reduction_pct": 33.79, |
| "palette_element_coverage": 0.999884, |
| "palette_row_fraction": 0.993369, |
| "verbatim_row_fraction": 0.006631, |
| "palette_exponent_ratio": 0.5053, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.8.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 8, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6696, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9715, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.97897, |
| "top_16_exponent_coverage": 0.999831, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 106138697, |
| "huffman_bits_per_exponent": 2.6992, |
| "huffman_full_ratio": 0.6687, |
| "huffman_size_reduction_pct": 33.13, |
| "palette_element_coverage": 0.999831, |
| "palette_row_fraction": 0.989237, |
| "verbatim_row_fraction": 0.010763, |
| "palette_exponent_ratio": 0.5073, |
| "palette_full_ratio": 0.7537, |
| "palette_size_reduction_pct": 24.63 |
| }, |
| { |
| "name": "model.layers.9.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 9, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1507, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9478, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5233, |
| "huffman_bits_per_exponent": 1.0221, |
| "huffman_full_ratio": 0.5658, |
| "huffman_size_reduction_pct": 43.42, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.9.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 9, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5497, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9698, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988479, |
| "top_16_exponent_coverage": 0.999903, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238306920, |
| "huffman_bits_per_exponent": 2.5973, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999903, |
| "palette_row_fraction": 0.994, |
| "verbatim_row_fraction": 0.006, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.9.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 9, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5588, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9702, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987931, |
| "top_16_exponent_coverage": 0.999907, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477740648, |
| "huffman_bits_per_exponent": 2.6035, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999907, |
| "palette_row_fraction": 0.99426, |
| "verbatim_row_fraction": 0.00574, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.9.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 9, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0511, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9456, |
| "unique_exponents": 3, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5147, |
| "huffman_bits_per_exponent": 1.0053, |
| "huffman_full_ratio": 0.5643, |
| "huffman_size_reduction_pct": 43.57, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.9.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 9, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5575, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9705, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987427, |
| "top_16_exponent_coverage": 0.999896, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68062290, |
| "huffman_bits_per_exponent": 2.5964, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999896, |
| "palette_row_fraction": 0.993706, |
| "verbatim_row_fraction": 0.006294, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.9.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 9, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6389, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9714, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.980121, |
| "top_16_exponent_coverage": 0.99984, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105138343, |
| "huffman_bits_per_exponent": 2.6738, |
| "huffman_full_ratio": 0.6671, |
| "huffman_size_reduction_pct": 33.29, |
| "palette_element_coverage": 0.99984, |
| "palette_row_fraction": 0.989824, |
| "verbatim_row_fraction": 0.010176, |
| "palette_exponent_ratio": 0.507, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.13.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 13, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0473, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.2481, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5153, |
| "huffman_bits_per_exponent": 1.0064, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.13.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 13, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.555, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988334, |
| "top_16_exponent_coverage": 0.999893, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238866731, |
| "huffman_bits_per_exponent": 2.6034, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999893, |
| "palette_row_fraction": 0.994156, |
| "verbatim_row_fraction": 0.005844, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.13.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 13, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5643, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9693, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987659, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478647462, |
| "huffman_bits_per_exponent": 2.6084, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994217, |
| "verbatim_row_fraction": 0.005783, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.13.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 13, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.3824, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5976, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5491, |
| "huffman_bits_per_exponent": 1.0725, |
| "huffman_full_ratio": 0.569, |
| "huffman_size_reduction_pct": 43.1, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.13.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 13, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.567, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9679, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987405, |
| "top_16_exponent_coverage": 0.999886, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68309373, |
| "huffman_bits_per_exponent": 2.6058, |
| "huffman_full_ratio": 0.6629, |
| "huffman_size_reduction_pct": 33.71, |
| "palette_element_coverage": 0.999886, |
| "palette_row_fraction": 0.993911, |
| "verbatim_row_fraction": 0.006089, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.13.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 13, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6362, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9697, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.979911, |
| "top_16_exponent_coverage": 0.999841, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105004640, |
| "huffman_bits_per_exponent": 2.6704, |
| "huffman_full_ratio": 0.6669, |
| "huffman_size_reduction_pct": 33.31, |
| "palette_element_coverage": 0.999841, |
| "palette_row_fraction": 0.989883, |
| "verbatim_row_fraction": 0.010117, |
| "palette_exponent_ratio": 0.507, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.14.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 14, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0518, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.4473, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5154, |
| "huffman_bits_per_exponent": 1.0066, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.14.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 14, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.555, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988324, |
| "top_16_exponent_coverage": 0.999896, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238880590, |
| "huffman_bits_per_exponent": 2.6036, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999896, |
| "palette_row_fraction": 0.994086, |
| "verbatim_row_fraction": 0.005914, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.14.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 14, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5647, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9693, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.987554, |
| "top_16_exponent_coverage": 0.999897, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478751076, |
| "huffman_bits_per_exponent": 2.609, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999897, |
| "palette_row_fraction": 0.993898, |
| "verbatim_row_fraction": 0.006102, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.14.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 14, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.2836, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.6184, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5367, |
| "huffman_bits_per_exponent": 1.0482, |
| "huffman_full_ratio": 0.5675, |
| "huffman_size_reduction_pct": 43.25, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.14.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 14, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5669, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9672, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987354, |
| "top_16_exponent_coverage": 0.99989, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68295971, |
| "huffman_bits_per_exponent": 2.6053, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.99989, |
| "palette_row_fraction": 0.993931, |
| "verbatim_row_fraction": 0.006069, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.14.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 14, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6555, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9693, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.97907, |
| "top_16_exponent_coverage": 0.999839, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105614770, |
| "huffman_bits_per_exponent": 2.6859, |
| "huffman_full_ratio": 0.6679, |
| "huffman_size_reduction_pct": 33.21, |
| "palette_element_coverage": 0.999839, |
| "palette_row_fraction": 0.989751, |
| "verbatim_row_fraction": 0.010249, |
| "palette_exponent_ratio": 0.5071, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.15.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 15, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0573, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.4555, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5157, |
| "huffman_bits_per_exponent": 1.0072, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.15.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 15, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5559, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988321, |
| "top_16_exponent_coverage": 0.999895, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238970153, |
| "huffman_bits_per_exponent": 2.6046, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999895, |
| "palette_row_fraction": 0.994113, |
| "verbatim_row_fraction": 0.005887, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.15.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 15, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5647, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9693, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987656, |
| "top_16_exponent_coverage": 0.9999, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478822134, |
| "huffman_bits_per_exponent": 2.6094, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.9999, |
| "palette_row_fraction": 0.993892, |
| "verbatim_row_fraction": 0.006108, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.15.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 15, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.232, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5127, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5310, |
| "huffman_bits_per_exponent": 1.0371, |
| "huffman_full_ratio": 0.5668, |
| "huffman_size_reduction_pct": 43.32, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.15.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 15, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5614, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9674, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.98753, |
| "top_16_exponent_coverage": 0.999887, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68158914, |
| "huffman_bits_per_exponent": 2.6001, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999887, |
| "palette_row_fraction": 0.99373, |
| "verbatim_row_fraction": 0.00627, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.15.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 15, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6499, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9691, |
| "unique_exponents": 28, |
| "top_8_exponent_coverage": 0.979127, |
| "top_16_exponent_coverage": 0.99984, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105416929, |
| "huffman_bits_per_exponent": 2.6809, |
| "huffman_full_ratio": 0.6676, |
| "huffman_size_reduction_pct": 33.24, |
| "palette_element_coverage": 0.99984, |
| "palette_row_fraction": 0.989808, |
| "verbatim_row_fraction": 0.010192, |
| "palette_exponent_ratio": 0.507, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.16.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 16, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0557, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.2899, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5156, |
| "huffman_bits_per_exponent": 1.007, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.16.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 16, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5568, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9688, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988308, |
| "top_16_exponent_coverage": 0.999898, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238984886, |
| "huffman_bits_per_exponent": 2.6047, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999898, |
| "palette_row_fraction": 0.993943, |
| "verbatim_row_fraction": 0.006057, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.16.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 16, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5687, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9692, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987394, |
| "top_16_exponent_coverage": 0.999886, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 479502747, |
| "huffman_bits_per_exponent": 2.6131, |
| "huffman_full_ratio": 0.6633, |
| "huffman_size_reduction_pct": 33.67, |
| "palette_element_coverage": 0.999886, |
| "palette_row_fraction": 0.993603, |
| "verbatim_row_fraction": 0.006397, |
| "palette_exponent_ratio": 0.5052, |
| "palette_full_ratio": 0.7526, |
| "palette_size_reduction_pct": 24.74 |
| }, |
| { |
| "name": "model.layers.16.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 16, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1846, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.3944, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5262, |
| "huffman_bits_per_exponent": 1.0277, |
| "huffman_full_ratio": 0.5662, |
| "huffman_size_reduction_pct": 43.38, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.16.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 16, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5633, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9663, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.987695, |
| "top_16_exponent_coverage": 0.999896, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68234088, |
| "huffman_bits_per_exponent": 2.6029, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999896, |
| "palette_row_fraction": 0.994004, |
| "verbatim_row_fraction": 0.005996, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.16.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 16, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6561, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.978966, |
| "top_16_exponent_coverage": 0.999833, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105629557, |
| "huffman_bits_per_exponent": 2.6863, |
| "huffman_full_ratio": 0.6679, |
| "huffman_size_reduction_pct": 33.21, |
| "palette_element_coverage": 0.999833, |
| "palette_row_fraction": 0.989339, |
| "verbatim_row_fraction": 0.010661, |
| "palette_exponent_ratio": 0.5073, |
| "palette_full_ratio": 0.7536, |
| "palette_size_reduction_pct": 24.64 |
| }, |
| { |
| "name": "model.layers.17.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 17, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0752, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.3565, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5170, |
| "huffman_bits_per_exponent": 1.0098, |
| "huffman_full_ratio": 0.5651, |
| "huffman_size_reduction_pct": 43.49, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.17.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 17, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5558, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9685, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988345, |
| "top_16_exponent_coverage": 0.999902, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238922418, |
| "huffman_bits_per_exponent": 2.604, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999902, |
| "palette_row_fraction": 0.994049, |
| "verbatim_row_fraction": 0.005951, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.17.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 17, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5655, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9691, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987606, |
| "top_16_exponent_coverage": 0.999899, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478961204, |
| "huffman_bits_per_exponent": 2.6101, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999899, |
| "palette_row_fraction": 0.993922, |
| "verbatim_row_fraction": 0.006078, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.17.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 17, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0986, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.3691, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5187, |
| "huffman_bits_per_exponent": 1.0131, |
| "huffman_full_ratio": 0.5653, |
| "huffman_size_reduction_pct": 43.47, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.17.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 17, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5705, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9669, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.98742, |
| "top_16_exponent_coverage": 0.999893, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68414730, |
| "huffman_bits_per_exponent": 2.6098, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999893, |
| "palette_row_fraction": 0.993752, |
| "verbatim_row_fraction": 0.006248, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.17.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 17, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6704, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9688, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.977872, |
| "top_16_exponent_coverage": 0.99983, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 106128823, |
| "huffman_bits_per_exponent": 2.699, |
| "huffman_full_ratio": 0.6687, |
| "huffman_size_reduction_pct": 33.13, |
| "palette_element_coverage": 0.99983, |
| "palette_row_fraction": 0.989217, |
| "verbatim_row_fraction": 0.010783, |
| "palette_exponent_ratio": 0.5073, |
| "palette_full_ratio": 0.7537, |
| "palette_size_reduction_pct": 24.63 |
| }, |
| { |
| "name": "model.layers.18.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 18, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0734, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.2507, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5168, |
| "huffman_bits_per_exponent": 1.0094, |
| "huffman_full_ratio": 0.565, |
| "huffman_size_reduction_pct": 43.5, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.18.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 18, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5555, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9683, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988375, |
| "top_16_exponent_coverage": 0.999902, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238785878, |
| "huffman_bits_per_exponent": 2.6026, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999902, |
| "palette_row_fraction": 0.994035, |
| "verbatim_row_fraction": 0.005965, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.18.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 18, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5643, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987603, |
| "top_16_exponent_coverage": 0.999899, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478730179, |
| "huffman_bits_per_exponent": 2.6089, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999899, |
| "palette_row_fraction": 0.99389, |
| "verbatim_row_fraction": 0.00611, |
| "palette_exponent_ratio": 0.505, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.18.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 18, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0849, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.3377, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5176, |
| "huffman_bits_per_exponent": 1.0109, |
| "huffman_full_ratio": 0.5651, |
| "huffman_size_reduction_pct": 43.49, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.18.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 18, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5775, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9664, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.98686, |
| "top_16_exponent_coverage": 0.999886, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68653319, |
| "huffman_bits_per_exponent": 2.6189, |
| "huffman_full_ratio": 0.6637, |
| "huffman_size_reduction_pct": 33.63, |
| "palette_element_coverage": 0.999886, |
| "palette_row_fraction": 0.993774, |
| "verbatim_row_fraction": 0.006226, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.18.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 18, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6571, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9688, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.978666, |
| "top_16_exponent_coverage": 0.999834, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105655857, |
| "huffman_bits_per_exponent": 2.687, |
| "huffman_full_ratio": 0.6679, |
| "huffman_size_reduction_pct": 33.21, |
| "palette_element_coverage": 0.999834, |
| "palette_row_fraction": 0.989458, |
| "verbatim_row_fraction": 0.010542, |
| "palette_exponent_ratio": 0.5072, |
| "palette_full_ratio": 0.7536, |
| "palette_size_reduction_pct": 24.64 |
| }, |
| { |
| "name": "model.layers.19.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 19, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0724, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.2359, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5168, |
| "huffman_bits_per_exponent": 1.0094, |
| "huffman_full_ratio": 0.565, |
| "huffman_size_reduction_pct": 43.5, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.19.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 19, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5544, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9682, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988423, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238667766, |
| "huffman_bits_per_exponent": 2.6013, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994249, |
| "verbatim_row_fraction": 0.005751, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.19.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 19, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5624, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9687, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987742, |
| "top_16_exponent_coverage": 0.999904, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478351222, |
| "huffman_bits_per_exponent": 2.6068, |
| "huffman_full_ratio": 0.6629, |
| "huffman_size_reduction_pct": 33.71, |
| "palette_element_coverage": 0.999904, |
| "palette_row_fraction": 0.994026, |
| "verbatim_row_fraction": 0.005974, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.19.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 19, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0768, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.3269, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5172, |
| "huffman_bits_per_exponent": 1.0102, |
| "huffman_full_ratio": 0.5651, |
| "huffman_size_reduction_pct": 43.49, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.19.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 19, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5636, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9659, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987742, |
| "top_16_exponent_coverage": 0.99989, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68332555, |
| "huffman_bits_per_exponent": 2.6067, |
| "huffman_full_ratio": 0.6629, |
| "huffman_size_reduction_pct": 33.71, |
| "palette_element_coverage": 0.99989, |
| "palette_row_fraction": 0.994258, |
| "verbatim_row_fraction": 0.005742, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.19.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 19, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6313, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9686, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.98026, |
| "top_16_exponent_coverage": 0.999839, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104761838, |
| "huffman_bits_per_exponent": 2.6642, |
| "huffman_full_ratio": 0.6665, |
| "huffman_size_reduction_pct": 33.35, |
| "palette_element_coverage": 0.999839, |
| "palette_row_fraction": 0.98973, |
| "verbatim_row_fraction": 0.01027, |
| "palette_exponent_ratio": 0.5071, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.20.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 20, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5616, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9658, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.987958, |
| "top_16_exponent_coverage": 0.999903, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68393150, |
| "huffman_bits_per_exponent": 2.609, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999903, |
| "palette_row_fraction": 0.994446, |
| "verbatim_row_fraction": 0.005554, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.20.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 20, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6148, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9683, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.981373, |
| "top_16_exponent_coverage": 0.999856, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104228689, |
| "huffman_bits_per_exponent": 2.6507, |
| "huffman_full_ratio": 0.6657, |
| "huffman_size_reduction_pct": 33.43, |
| "palette_element_coverage": 0.999856, |
| "palette_row_fraction": 0.991257, |
| "verbatim_row_fraction": 0.008743, |
| "palette_exponent_ratio": 0.5063, |
| "palette_full_ratio": 0.7532, |
| "palette_size_reduction_pct": 24.68 |
| }, |
| { |
| "name": "model.layers.20.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 20, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0643, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.0308, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5161, |
| "huffman_bits_per_exponent": 1.008, |
| "huffman_full_ratio": 0.565, |
| "huffman_size_reduction_pct": 43.5, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.20.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 20, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.555, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9679, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.988425, |
| "top_16_exponent_coverage": 0.999911, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238730349, |
| "huffman_bits_per_exponent": 2.602, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999911, |
| "palette_row_fraction": 0.994396, |
| "verbatim_row_fraction": 0.005604, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.20.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 20, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5639, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9686, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.987635, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478566860, |
| "huffman_bits_per_exponent": 2.608, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994158, |
| "verbatim_row_fraction": 0.005842, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.20.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 20, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0566, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.158, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5155, |
| "huffman_bits_per_exponent": 1.0068, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.21.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 21, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0691, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9149, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5165, |
| "huffman_bits_per_exponent": 1.0088, |
| "huffman_full_ratio": 0.565, |
| "huffman_size_reduction_pct": 43.5, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.21.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 21, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.554, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9679, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988462, |
| "top_16_exponent_coverage": 0.999908, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238611395, |
| "huffman_bits_per_exponent": 2.6007, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999908, |
| "palette_row_fraction": 0.994256, |
| "verbatim_row_fraction": 0.005744, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.21.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 21, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5634, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9684, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.987616, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478448087, |
| "huffman_bits_per_exponent": 2.6073, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994141, |
| "verbatim_row_fraction": 0.005859, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.21.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 21, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0542, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.152, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5154, |
| "huffman_bits_per_exponent": 1.0066, |
| "huffman_full_ratio": 0.5649, |
| "huffman_size_reduction_pct": 43.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.21.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 21, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5683, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9667, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.98743, |
| "top_16_exponent_coverage": 0.9999, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68552605, |
| "huffman_bits_per_exponent": 2.6151, |
| "huffman_full_ratio": 0.6634, |
| "huffman_size_reduction_pct": 33.66, |
| "palette_element_coverage": 0.9999, |
| "palette_row_fraction": 0.994287, |
| "verbatim_row_fraction": 0.005713, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.21.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 21, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6277, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.980951, |
| "top_16_exponent_coverage": 0.999843, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104623297, |
| "huffman_bits_per_exponent": 2.6607, |
| "huffman_full_ratio": 0.6663, |
| "huffman_size_reduction_pct": 33.37, |
| "palette_element_coverage": 0.999843, |
| "palette_row_fraction": 0.990322, |
| "verbatim_row_fraction": 0.009678, |
| "palette_exponent_ratio": 0.5068, |
| "palette_full_ratio": 0.7534, |
| "palette_size_reduction_pct": 24.66 |
| }, |
| { |
| "name": "model.layers.22.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 22, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0762, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9507, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5170, |
| "huffman_bits_per_exponent": 1.0098, |
| "huffman_full_ratio": 0.5651, |
| "huffman_size_reduction_pct": 43.49, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.22.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 22, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5537, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9677, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.988473, |
| "top_16_exponent_coverage": 0.99991, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238534938, |
| "huffman_bits_per_exponent": 2.5998, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.99991, |
| "palette_row_fraction": 0.994337, |
| "verbatim_row_fraction": 0.005663, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.22.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 22, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5635, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9683, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.987557, |
| "top_16_exponent_coverage": 0.999907, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478442651, |
| "huffman_bits_per_exponent": 2.6073, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999907, |
| "palette_row_fraction": 0.994129, |
| "verbatim_row_fraction": 0.005871, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.22.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 22, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0464, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.019, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5149, |
| "huffman_bits_per_exponent": 1.0057, |
| "huffman_full_ratio": 0.5648, |
| "huffman_size_reduction_pct": 43.52, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.22.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 22, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5712, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9666, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987226, |
| "top_16_exponent_coverage": 0.999899, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68635614, |
| "huffman_bits_per_exponent": 2.6182, |
| "huffman_full_ratio": 0.6636, |
| "huffman_size_reduction_pct": 33.64, |
| "palette_element_coverage": 0.999899, |
| "palette_row_fraction": 0.994236, |
| "verbatim_row_fraction": 0.005764, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.22.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 22, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6357, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9687, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.980618, |
| "top_16_exponent_coverage": 0.999849, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104884360, |
| "huffman_bits_per_exponent": 2.6673, |
| "huffman_full_ratio": 0.6667, |
| "huffman_size_reduction_pct": 33.33, |
| "palette_element_coverage": 0.999849, |
| "palette_row_fraction": 0.990664, |
| "verbatim_row_fraction": 0.009336, |
| "palette_exponent_ratio": 0.5066, |
| "palette_full_ratio": 0.7533, |
| "palette_size_reduction_pct": 24.67 |
| }, |
| { |
| "name": "model.layers.23.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 23, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0911, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9676, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5181, |
| "huffman_bits_per_exponent": 1.0119, |
| "huffman_full_ratio": 0.5652, |
| "huffman_size_reduction_pct": 43.48, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.23.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 23, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.553, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9677, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.98853, |
| "top_16_exponent_coverage": 0.999912, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238456634, |
| "huffman_bits_per_exponent": 2.599, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999912, |
| "palette_row_fraction": 0.994478, |
| "verbatim_row_fraction": 0.005522, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.23.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 23, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5626, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9683, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987594, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478273375, |
| "huffman_bits_per_exponent": 2.6064, |
| "huffman_full_ratio": 0.6629, |
| "huffman_size_reduction_pct": 33.71, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994177, |
| "verbatim_row_fraction": 0.005823, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.23.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 23, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0458, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.9799, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5148, |
| "huffman_bits_per_exponent": 1.0055, |
| "huffman_full_ratio": 0.5648, |
| "huffman_size_reduction_pct": 43.52, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.23.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 23, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.558, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9671, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.988164, |
| "top_16_exponent_coverage": 0.999896, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68322078, |
| "huffman_bits_per_exponent": 2.6063, |
| "huffman_full_ratio": 0.6629, |
| "huffman_size_reduction_pct": 33.71, |
| "palette_element_coverage": 0.999896, |
| "palette_row_fraction": 0.994358, |
| "verbatim_row_fraction": 0.005642, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.23.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 23, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6116, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9685, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.982637, |
| "top_16_exponent_coverage": 0.999856, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104065576, |
| "huffman_bits_per_exponent": 2.6465, |
| "huffman_full_ratio": 0.6654, |
| "huffman_size_reduction_pct": 33.46, |
| "palette_element_coverage": 0.999856, |
| "palette_row_fraction": 0.991261, |
| "verbatim_row_fraction": 0.008739, |
| "palette_exponent_ratio": 0.5063, |
| "palette_full_ratio": 0.7532, |
| "palette_size_reduction_pct": 24.68 |
| }, |
| { |
| "name": "model.layers.24.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 24, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1008, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.9509, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5189, |
| "huffman_bits_per_exponent": 1.0135, |
| "huffman_full_ratio": 0.5653, |
| "huffman_size_reduction_pct": 43.47, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.24.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 24, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5523, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9675, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988553, |
| "top_16_exponent_coverage": 0.999914, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238389535, |
| "huffman_bits_per_exponent": 2.5982, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999914, |
| "palette_row_fraction": 0.994575, |
| "verbatim_row_fraction": 0.005425, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.24.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 24, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5616, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9681, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987616, |
| "top_16_exponent_coverage": 0.999911, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478084760, |
| "huffman_bits_per_exponent": 2.6054, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999911, |
| "palette_row_fraction": 0.994347, |
| "verbatim_row_fraction": 0.005653, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.24.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 24, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0458, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.9508, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5148, |
| "huffman_bits_per_exponent": 1.0055, |
| "huffman_full_ratio": 0.5648, |
| "huffman_size_reduction_pct": 43.52, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.24.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 24, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.559, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9668, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.988112, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68300168, |
| "huffman_bits_per_exponent": 2.6054, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994421, |
| "verbatim_row_fraction": 0.005579, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.24.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 24, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6255, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9685, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.981951, |
| "top_16_exponent_coverage": 0.999859, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104517749, |
| "huffman_bits_per_exponent": 2.658, |
| "huffman_full_ratio": 0.6661, |
| "huffman_size_reduction_pct": 33.39, |
| "palette_element_coverage": 0.999859, |
| "palette_row_fraction": 0.991315, |
| "verbatim_row_fraction": 0.008685, |
| "palette_exponent_ratio": 0.5063, |
| "palette_full_ratio": 0.7531, |
| "palette_size_reduction_pct": 24.69 |
| }, |
| { |
| "name": "model.layers.25.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 25, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1026, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.0343, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5189, |
| "huffman_bits_per_exponent": 1.0135, |
| "huffman_full_ratio": 0.5653, |
| "huffman_size_reduction_pct": 43.47, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.25.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 25, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5521, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9673, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988551, |
| "top_16_exponent_coverage": 0.999911, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238347011, |
| "huffman_bits_per_exponent": 2.5978, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.999911, |
| "palette_row_fraction": 0.994396, |
| "verbatim_row_fraction": 0.005604, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.25.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 25, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5618, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.968, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987566, |
| "top_16_exponent_coverage": 0.999912, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478078982, |
| "huffman_bits_per_exponent": 2.6053, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999912, |
| "palette_row_fraction": 0.994385, |
| "verbatim_row_fraction": 0.005615, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.25.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 25, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0426, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.9019, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5146, |
| "huffman_bits_per_exponent": 1.0051, |
| "huffman_full_ratio": 0.5648, |
| "huffman_size_reduction_pct": 43.52, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.25.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 25, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5647, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9675, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987589, |
| "top_16_exponent_coverage": 0.999889, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68379451, |
| "huffman_bits_per_exponent": 2.6085, |
| "huffman_full_ratio": 0.663, |
| "huffman_size_reduction_pct": 33.7, |
| "palette_element_coverage": 0.999889, |
| "palette_row_fraction": 0.994304, |
| "verbatim_row_fraction": 0.005696, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.25.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 25, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6131, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9688, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.983176, |
| "top_16_exponent_coverage": 0.999873, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104092898, |
| "huffman_bits_per_exponent": 2.6472, |
| "huffman_full_ratio": 0.6655, |
| "huffman_size_reduction_pct": 33.45, |
| "palette_element_coverage": 0.999873, |
| "palette_row_fraction": 0.992088, |
| "verbatim_row_fraction": 0.007912, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.753, |
| "palette_size_reduction_pct": 24.7 |
| }, |
| { |
| "name": "model.layers.26.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 26, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.133, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.0097, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5215, |
| "huffman_bits_per_exponent": 1.0186, |
| "huffman_full_ratio": 0.5656, |
| "huffman_size_reduction_pct": 43.44, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.26.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 26, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5512, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9672, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.988595, |
| "top_16_exponent_coverage": 0.999913, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238260270, |
| "huffman_bits_per_exponent": 2.5968, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999913, |
| "palette_row_fraction": 0.994478, |
| "verbatim_row_fraction": 0.005522, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.26.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 26, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5608, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9679, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987608, |
| "top_16_exponent_coverage": 0.999912, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477896806, |
| "huffman_bits_per_exponent": 2.6043, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999912, |
| "palette_row_fraction": 0.994408, |
| "verbatim_row_fraction": 0.005592, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.26.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 26, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0387, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.8833, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5143, |
| "huffman_bits_per_exponent": 1.0045, |
| "huffman_full_ratio": 0.5647, |
| "huffman_size_reduction_pct": 43.53, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.26.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 26, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.558, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 28, |
| "top_8_exponent_coverage": 0.988091, |
| "top_16_exponent_coverage": 0.999906, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68203316, |
| "huffman_bits_per_exponent": 2.6018, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999906, |
| "palette_row_fraction": 0.994644, |
| "verbatim_row_fraction": 0.005356, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.26.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 26, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6189, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.983552, |
| "top_16_exponent_coverage": 0.999875, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104265933, |
| "huffman_bits_per_exponent": 2.6516, |
| "huffman_full_ratio": 0.6657, |
| "huffman_size_reduction_pct": 33.43, |
| "palette_element_coverage": 0.999875, |
| "palette_row_fraction": 0.992139, |
| "verbatim_row_fraction": 0.007861, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.27.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 27, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5594, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9668, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.987711, |
| "top_16_exponent_coverage": 0.999898, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68206401, |
| "huffman_bits_per_exponent": 2.6019, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999898, |
| "palette_row_fraction": 0.994756, |
| "verbatim_row_fraction": 0.005244, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.27.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 27, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6183, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.969, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.983581, |
| "top_16_exponent_coverage": 0.999874, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104246035, |
| "huffman_bits_per_exponent": 2.6511, |
| "huffman_full_ratio": 0.6657, |
| "huffman_size_reduction_pct": 33.43, |
| "palette_element_coverage": 0.999874, |
| "palette_row_fraction": 0.992121, |
| "verbatim_row_fraction": 0.007879, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.27.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 27, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1779, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.0026, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5256, |
| "huffman_bits_per_exponent": 1.0266, |
| "huffman_full_ratio": 0.5661, |
| "huffman_size_reduction_pct": 43.39, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.27.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 27, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5496, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9671, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988684, |
| "top_16_exponent_coverage": 0.999917, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238100967, |
| "huffman_bits_per_exponent": 2.5951, |
| "huffman_full_ratio": 0.6622, |
| "huffman_size_reduction_pct": 33.78, |
| "palette_element_coverage": 0.999917, |
| "palette_row_fraction": 0.994722, |
| "verbatim_row_fraction": 0.005278, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.27.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 27, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.557, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9678, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987888, |
| "top_16_exponent_coverage": 0.999917, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477268958, |
| "huffman_bits_per_exponent": 2.6009, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999917, |
| "palette_row_fraction": 0.994673, |
| "verbatim_row_fraction": 0.005327, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.27.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 27, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0377, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.8337, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5143, |
| "huffman_bits_per_exponent": 1.0045, |
| "huffman_full_ratio": 0.5647, |
| "huffman_size_reduction_pct": 43.53, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.28.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 28, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.4127, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.0471, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5535, |
| "huffman_bits_per_exponent": 1.0811, |
| "huffman_full_ratio": 0.5695, |
| "huffman_size_reduction_pct": 43.05, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.28.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 28, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5488, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9671, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.988706, |
| "top_16_exponent_coverage": 0.999918, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238043097, |
| "huffman_bits_per_exponent": 2.5945, |
| "huffman_full_ratio": 0.6622, |
| "huffman_size_reduction_pct": 33.78, |
| "palette_element_coverage": 0.999918, |
| "palette_row_fraction": 0.994747, |
| "verbatim_row_fraction": 0.005253, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.28.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 28, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5554, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9678, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987995, |
| "top_16_exponent_coverage": 0.999916, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476979467, |
| "huffman_bits_per_exponent": 2.5993, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999916, |
| "palette_row_fraction": 0.99466, |
| "verbatim_row_fraction": 0.00534, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.28.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 28, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0404, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.8186, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5148, |
| "huffman_bits_per_exponent": 1.0055, |
| "huffman_full_ratio": 0.5653, |
| "huffman_size_reduction_pct": 43.47, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.28.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 28, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5585, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.968, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987977, |
| "top_16_exponent_coverage": 0.999902, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68181602, |
| "huffman_bits_per_exponent": 2.6009, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.999902, |
| "palette_row_fraction": 0.994883, |
| "verbatim_row_fraction": 0.005117, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.28.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 28, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6374, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9695, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.98256, |
| "top_16_exponent_coverage": 0.999871, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104883699, |
| "huffman_bits_per_exponent": 2.6673, |
| "huffman_full_ratio": 0.6667, |
| "huffman_size_reduction_pct": 33.33, |
| "palette_element_coverage": 0.999871, |
| "palette_row_fraction": 0.991883, |
| "verbatim_row_fraction": 0.008117, |
| "palette_exponent_ratio": 0.506, |
| "palette_full_ratio": 0.753, |
| "palette_size_reduction_pct": 24.7 |
| }, |
| { |
| "name": "model.layers.29.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 29, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.6589, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.7839, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5973, |
| "huffman_bits_per_exponent": 1.1666, |
| "huffman_full_ratio": 0.5749, |
| "huffman_size_reduction_pct": 42.51, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.29.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 29, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.548, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9671, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988736, |
| "top_16_exponent_coverage": 0.999915, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 237962191, |
| "huffman_bits_per_exponent": 2.5936, |
| "huffman_full_ratio": 0.6621, |
| "huffman_size_reduction_pct": 33.79, |
| "palette_element_coverage": 0.999915, |
| "palette_row_fraction": 0.994602, |
| "verbatim_row_fraction": 0.005398, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.29.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 29, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5536, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9677, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.988139, |
| "top_16_exponent_coverage": 0.999919, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476660116, |
| "huffman_bits_per_exponent": 2.5976, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999919, |
| "palette_row_fraction": 0.994821, |
| "verbatim_row_fraction": 0.005179, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.29.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 29, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1296, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.7889, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5221, |
| "huffman_bits_per_exponent": 1.0197, |
| "huffman_full_ratio": 0.5662, |
| "huffman_size_reduction_pct": 43.38, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.29.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 29, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5576, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9685, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987915, |
| "top_16_exponent_coverage": 0.999883, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68166911, |
| "huffman_bits_per_exponent": 2.6004, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999883, |
| "palette_row_fraction": 0.994617, |
| "verbatim_row_fraction": 0.005383, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.29.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 29, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.595, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9695, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.984673, |
| "top_16_exponent_coverage": 0.999878, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103502121, |
| "huffman_bits_per_exponent": 2.6322, |
| "huffman_full_ratio": 0.6645, |
| "huffman_size_reduction_pct": 33.55, |
| "palette_element_coverage": 0.999878, |
| "palette_row_fraction": 0.992443, |
| "verbatim_row_fraction": 0.007557, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.30.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 30, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 1.013, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.7177, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 7506, |
| "huffman_bits_per_exponent": 1.466, |
| "huffman_full_ratio": 0.5941, |
| "huffman_size_reduction_pct": 40.59, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.30.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 30, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5477, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9672, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988746, |
| "top_16_exponent_coverage": 0.999917, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 237923684, |
| "huffman_bits_per_exponent": 2.5932, |
| "huffman_full_ratio": 0.6621, |
| "huffman_size_reduction_pct": 33.79, |
| "palette_element_coverage": 0.999917, |
| "palette_row_fraction": 0.994721, |
| "verbatim_row_fraction": 0.005279, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.30.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 30, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5528, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988175, |
| "top_16_exponent_coverage": 0.999918, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476515094, |
| "huffman_bits_per_exponent": 2.5968, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999918, |
| "palette_row_fraction": 0.994762, |
| "verbatim_row_fraction": 0.005238, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.30.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 30, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.4956, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.8962, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5658, |
| "huffman_bits_per_exponent": 1.1051, |
| "huffman_full_ratio": 0.5715, |
| "huffman_size_reduction_pct": 42.85, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.30.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 30, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5567, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9687, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.988071, |
| "top_16_exponent_coverage": 0.999899, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68149965, |
| "huffman_bits_per_exponent": 2.5997, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999899, |
| "palette_row_fraction": 0.994583, |
| "verbatim_row_fraction": 0.005417, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.30.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 30, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5895, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9694, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.984821, |
| "top_16_exponent_coverage": 0.999884, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103335567, |
| "huffman_bits_per_exponent": 2.628, |
| "huffman_full_ratio": 0.6642, |
| "huffman_size_reduction_pct": 33.58, |
| "palette_element_coverage": 0.999884, |
| "palette_row_fraction": 0.992756, |
| "verbatim_row_fraction": 0.007244, |
| "palette_exponent_ratio": 0.5056, |
| "palette_full_ratio": 0.7528, |
| "palette_size_reduction_pct": 24.72 |
| }, |
| { |
| "name": "model.layers.31.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 31, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 1.0205, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.7435, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 7633, |
| "huffman_bits_per_exponent": 1.4908, |
| "huffman_full_ratio": 0.5956, |
| "huffman_size_reduction_pct": 40.44, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.31.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 31, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5476, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9672, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988737, |
| "top_16_exponent_coverage": 0.999917, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 237896466, |
| "huffman_bits_per_exponent": 2.5929, |
| "huffman_full_ratio": 0.6621, |
| "huffman_size_reduction_pct": 33.79, |
| "palette_element_coverage": 0.999917, |
| "palette_row_fraction": 0.994744, |
| "verbatim_row_fraction": 0.005256, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.31.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 31, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.552, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.988228, |
| "top_16_exponent_coverage": 0.99992, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476362108, |
| "huffman_bits_per_exponent": 2.596, |
| "huffman_full_ratio": 0.6622, |
| "huffman_size_reduction_pct": 33.78, |
| "palette_element_coverage": 0.99992, |
| "palette_row_fraction": 0.994894, |
| "verbatim_row_fraction": 0.005106, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.31.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 31, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1291, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.7861, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5219, |
| "huffman_bits_per_exponent": 1.0193, |
| "huffman_full_ratio": 0.5661, |
| "huffman_size_reduction_pct": 43.39, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.31.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 31, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5602, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 28, |
| "top_8_exponent_coverage": 0.987095, |
| "top_16_exponent_coverage": 0.999885, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68263078, |
| "huffman_bits_per_exponent": 2.604, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999885, |
| "palette_row_fraction": 0.994644, |
| "verbatim_row_fraction": 0.005356, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.31.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 31, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6135, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9696, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.98347, |
| "top_16_exponent_coverage": 0.999875, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104100224, |
| "huffman_bits_per_exponent": 2.6474, |
| "huffman_full_ratio": 0.6655, |
| "huffman_size_reduction_pct": 33.45, |
| "palette_element_coverage": 0.999875, |
| "palette_row_fraction": 0.992142, |
| "verbatim_row_fraction": 0.007858, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.32.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 32, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1688, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.5535, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5251, |
| "huffman_bits_per_exponent": 1.0256, |
| "huffman_full_ratio": 0.5665, |
| "huffman_size_reduction_pct": 43.35, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.32.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 32, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5481, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9674, |
| "unique_exponents": 37, |
| "top_8_exponent_coverage": 0.988694, |
| "top_16_exponent_coverage": 0.999919, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 237912183, |
| "huffman_bits_per_exponent": 2.593, |
| "huffman_full_ratio": 0.6621, |
| "huffman_size_reduction_pct": 33.79, |
| "palette_element_coverage": 0.999919, |
| "palette_row_fraction": 0.99484, |
| "verbatim_row_fraction": 0.00516, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.32.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 32, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5517, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.988236, |
| "top_16_exponent_coverage": 0.999919, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476312673, |
| "huffman_bits_per_exponent": 2.5957, |
| "huffman_full_ratio": 0.6622, |
| "huffman_size_reduction_pct": 33.78, |
| "palette_element_coverage": 0.999919, |
| "palette_row_fraction": 0.994818, |
| "verbatim_row_fraction": 0.005182, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.32.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 32, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0873, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.7443, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5186, |
| "huffman_bits_per_exponent": 1.0129, |
| "huffman_full_ratio": 0.5657, |
| "huffman_size_reduction_pct": 43.43, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.32.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 32, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5638, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9682, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.98596, |
| "top_16_exponent_coverage": 0.999888, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68450243, |
| "huffman_bits_per_exponent": 2.6112, |
| "huffman_full_ratio": 0.6632, |
| "huffman_size_reduction_pct": 33.68, |
| "palette_element_coverage": 0.999888, |
| "palette_row_fraction": 0.994988, |
| "verbatim_row_fraction": 0.005012, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7522, |
| "palette_size_reduction_pct": 24.78 |
| }, |
| { |
| "name": "model.layers.32.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 32, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6423, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9695, |
| "unique_exponents": 29, |
| "top_8_exponent_coverage": 0.980589, |
| "top_16_exponent_coverage": 0.999847, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 105100837, |
| "huffman_bits_per_exponent": 2.6729, |
| "huffman_full_ratio": 0.6671, |
| "huffman_size_reduction_pct": 33.29, |
| "palette_element_coverage": 0.999847, |
| "palette_row_fraction": 0.990291, |
| "verbatim_row_fraction": 0.009709, |
| "palette_exponent_ratio": 0.5068, |
| "palette_full_ratio": 0.7534, |
| "palette_size_reduction_pct": 24.66 |
| }, |
| { |
| "name": "model.layers.33.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 33, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1175, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.0546, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5206, |
| "huffman_bits_per_exponent": 1.0168, |
| "huffman_full_ratio": 0.566, |
| "huffman_size_reduction_pct": 43.4, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.33.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 33, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5477, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988698, |
| "top_16_exponent_coverage": 0.999919, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 237871563, |
| "huffman_bits_per_exponent": 2.5926, |
| "huffman_full_ratio": 0.662, |
| "huffman_size_reduction_pct": 33.8, |
| "palette_element_coverage": 0.999919, |
| "palette_row_fraction": 0.994827, |
| "verbatim_row_fraction": 0.005173, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.33.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 33, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5521, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9675, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988175, |
| "top_16_exponent_coverage": 0.999921, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476356455, |
| "huffman_bits_per_exponent": 2.5959, |
| "huffman_full_ratio": 0.6622, |
| "huffman_size_reduction_pct": 33.78, |
| "palette_element_coverage": 0.999921, |
| "palette_row_fraction": 0.994978, |
| "verbatim_row_fraction": 0.005022, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7522, |
| "palette_size_reduction_pct": 24.78 |
| }, |
| { |
| "name": "model.layers.33.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 33, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0779, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.7141, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5178, |
| "huffman_bits_per_exponent": 1.0113, |
| "huffman_full_ratio": 0.5661, |
| "huffman_size_reduction_pct": 43.39, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.33.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 33, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.561, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9689, |
| "unique_exponents": 28, |
| "top_8_exponent_coverage": 0.987224, |
| "top_16_exponent_coverage": 0.999872, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68264894, |
| "huffman_bits_per_exponent": 2.6041, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999872, |
| "palette_row_fraction": 0.994551, |
| "verbatim_row_fraction": 0.005449, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.33.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 33, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5956, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9699, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.984203, |
| "top_16_exponent_coverage": 0.999876, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103538945, |
| "huffman_bits_per_exponent": 2.6331, |
| "huffman_full_ratio": 0.6646, |
| "huffman_size_reduction_pct": 33.54, |
| "palette_element_coverage": 0.999876, |
| "palette_row_fraction": 0.992119, |
| "verbatim_row_fraction": 0.007881, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.34.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 34, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5607, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9686, |
| "unique_exponents": 28, |
| "top_8_exponent_coverage": 0.987033, |
| "top_16_exponent_coverage": 0.999884, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68280346, |
| "huffman_bits_per_exponent": 2.6047, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999884, |
| "palette_row_fraction": 0.994895, |
| "verbatim_row_fraction": 0.005105, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.34.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 34, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6061, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9694, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.983234, |
| "top_16_exponent_coverage": 0.999876, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103884628, |
| "huffman_bits_per_exponent": 2.6419, |
| "huffman_full_ratio": 0.6651, |
| "huffman_size_reduction_pct": 33.49, |
| "palette_element_coverage": 0.999876, |
| "palette_row_fraction": 0.992142, |
| "verbatim_row_fraction": 0.007858, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "lm_head.weight", |
| "tensor_type": "lm_head", |
| "category": "embedding", |
| "layer_index": -1, |
| "shape": [ |
| 100352, |
| 5120 |
| ], |
| "n_elements": 513802240, |
| "original_bytes": 1027604480, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5553, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9721, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988274, |
| "top_16_exponent_coverage": 0.999914, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 1337180173, |
| "huffman_bits_per_exponent": 2.6025, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999914, |
| "palette_row_fraction": 0.994524, |
| "verbatim_row_fraction": 0.005476, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.34.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 34, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0853, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.0937, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5180, |
| "huffman_bits_per_exponent": 1.0117, |
| "huffman_full_ratio": 0.5657, |
| "huffman_size_reduction_pct": 43.43, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.34.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 34, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5491, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9678, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988625, |
| "top_16_exponent_coverage": 0.999917, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 237958895, |
| "huffman_bits_per_exponent": 2.5935, |
| "huffman_full_ratio": 0.6621, |
| "huffman_size_reduction_pct": 33.79, |
| "palette_element_coverage": 0.999917, |
| "palette_row_fraction": 0.994722, |
| "verbatim_row_fraction": 0.005278, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.34.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 34, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5527, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988119, |
| "top_16_exponent_coverage": 0.99992, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476459860, |
| "huffman_bits_per_exponent": 2.5965, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.99992, |
| "palette_row_fraction": 0.994925, |
| "verbatim_row_fraction": 0.005075, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7522, |
| "palette_size_reduction_pct": 24.78 |
| }, |
| { |
| "name": "model.layers.34.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 34, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0739, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.7073, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5172, |
| "huffman_bits_per_exponent": 1.0102, |
| "huffman_full_ratio": 0.5656, |
| "huffman_size_reduction_pct": 43.44, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.35.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 35, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0979, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.3433, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5191, |
| "huffman_bits_per_exponent": 1.0139, |
| "huffman_full_ratio": 0.5658, |
| "huffman_size_reduction_pct": 43.42, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.35.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 35, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5521, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9679, |
| "unique_exponents": 35, |
| "top_8_exponent_coverage": 0.988533, |
| "top_16_exponent_coverage": 0.999914, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238185983, |
| "huffman_bits_per_exponent": 2.596, |
| "huffman_full_ratio": 0.6623, |
| "huffman_size_reduction_pct": 33.77, |
| "palette_element_coverage": 0.999914, |
| "palette_row_fraction": 0.994521, |
| "verbatim_row_fraction": 0.005479, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.35.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 35, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.555, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.987904, |
| "top_16_exponent_coverage": 0.99992, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 476827874, |
| "huffman_bits_per_exponent": 2.5985, |
| "huffman_full_ratio": 0.6624, |
| "huffman_size_reduction_pct": 33.76, |
| "palette_element_coverage": 0.99992, |
| "palette_row_fraction": 0.994863, |
| "verbatim_row_fraction": 0.005137, |
| "palette_exponent_ratio": 0.5045, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.35.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 35, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0702, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.697, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5168, |
| "huffman_bits_per_exponent": 1.0094, |
| "huffman_full_ratio": 0.5655, |
| "huffman_size_reduction_pct": 43.45, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.35.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 35, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5677, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9687, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.986054, |
| "top_16_exponent_coverage": 0.999883, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68477962, |
| "huffman_bits_per_exponent": 2.6122, |
| "huffman_full_ratio": 0.6633, |
| "huffman_size_reduction_pct": 33.67, |
| "palette_element_coverage": 0.999883, |
| "palette_row_fraction": 0.994702, |
| "verbatim_row_fraction": 0.005298, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.35.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 35, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.6091, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9696, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.982383, |
| "top_16_exponent_coverage": 0.999864, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 104000046, |
| "huffman_bits_per_exponent": 2.6449, |
| "huffman_full_ratio": 0.6653, |
| "huffman_size_reduction_pct": 33.47, |
| "palette_element_coverage": 0.999864, |
| "palette_row_fraction": 0.991523, |
| "verbatim_row_fraction": 0.008477, |
| "palette_exponent_ratio": 0.5062, |
| "palette_full_ratio": 0.7531, |
| "palette_size_reduction_pct": 24.69 |
| }, |
| { |
| "name": "model.layers.36.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 36, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0952, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.1865, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5196, |
| "huffman_bits_per_exponent": 1.0148, |
| "huffman_full_ratio": 0.5659, |
| "huffman_size_reduction_pct": 43.41, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.36.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 36, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5588, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9682, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.98836, |
| "top_16_exponent_coverage": 0.99991, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 238686277, |
| "huffman_bits_per_exponent": 2.6015, |
| "huffman_full_ratio": 0.6626, |
| "huffman_size_reduction_pct": 33.74, |
| "palette_element_coverage": 0.99991, |
| "palette_row_fraction": 0.994313, |
| "verbatim_row_fraction": 0.005687, |
| "palette_exponent_ratio": 0.5048, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.36.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 36, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5575, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9676, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987638, |
| "top_16_exponent_coverage": 0.999918, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477247120, |
| "huffman_bits_per_exponent": 2.6008, |
| "huffman_full_ratio": 0.6625, |
| "huffman_size_reduction_pct": 33.75, |
| "palette_element_coverage": 0.999918, |
| "palette_row_fraction": 0.994791, |
| "verbatim_row_fraction": 0.005209, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.36.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 36, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0673, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 2.7848, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5166, |
| "huffman_bits_per_exponent": 1.009, |
| "huffman_full_ratio": 0.5655, |
| "huffman_size_reduction_pct": 43.45, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.36.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 36, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5618, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.969, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.986883, |
| "top_16_exponent_coverage": 0.999866, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68302481, |
| "huffman_bits_per_exponent": 2.6055, |
| "huffman_full_ratio": 0.6628, |
| "huffman_size_reduction_pct": 33.72, |
| "palette_element_coverage": 0.999866, |
| "palette_row_fraction": 0.994436, |
| "verbatim_row_fraction": 0.005564, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.36.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 36, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5974, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9697, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.983895, |
| "top_16_exponent_coverage": 0.999881, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103597469, |
| "huffman_bits_per_exponent": 2.6346, |
| "huffman_full_ratio": 0.6647, |
| "huffman_size_reduction_pct": 33.53, |
| "palette_element_coverage": 0.999881, |
| "palette_row_fraction": 0.992428, |
| "verbatim_row_fraction": 0.007572, |
| "palette_exponent_ratio": 0.5057, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.37.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 37, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1281, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.2651, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5219, |
| "huffman_bits_per_exponent": 1.0193, |
| "huffman_full_ratio": 0.5661, |
| "huffman_size_reduction_pct": 43.39, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.37.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 37, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5679, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9683, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.988123, |
| "top_16_exponent_coverage": 0.999905, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 239378134, |
| "huffman_bits_per_exponent": 2.609, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999905, |
| "palette_row_fraction": 0.994044, |
| "verbatim_row_fraction": 0.005956, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.37.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 37, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5605, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9675, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.987284, |
| "top_16_exponent_coverage": 0.999917, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 477734101, |
| "huffman_bits_per_exponent": 2.6034, |
| "huffman_full_ratio": 0.6627, |
| "huffman_size_reduction_pct": 33.73, |
| "palette_element_coverage": 0.999917, |
| "palette_row_fraction": 0.994715, |
| "verbatim_row_fraction": 0.005285, |
| "palette_exponent_ratio": 0.5046, |
| "palette_full_ratio": 0.7523, |
| "palette_size_reduction_pct": 24.77 |
| }, |
| { |
| "name": "model.layers.37.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 37, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0628, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.0598, |
| "unique_exponents": 4, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5161, |
| "huffman_bits_per_exponent": 1.008, |
| "huffman_full_ratio": 0.565, |
| "huffman_size_reduction_pct": 43.5, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.37.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 37, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5689, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9694, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.986518, |
| "top_16_exponent_coverage": 0.999827, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68442382, |
| "huffman_bits_per_exponent": 2.6109, |
| "huffman_full_ratio": 0.6632, |
| "huffman_size_reduction_pct": 33.68, |
| "palette_element_coverage": 0.999827, |
| "palette_row_fraction": 0.990449, |
| "verbatim_row_fraction": 0.009551, |
| "palette_exponent_ratio": 0.5067, |
| "palette_full_ratio": 0.7534, |
| "palette_size_reduction_pct": 24.66 |
| }, |
| { |
| "name": "model.layers.37.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 37, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5966, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9696, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.983859, |
| "top_16_exponent_coverage": 0.999888, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103575645, |
| "huffman_bits_per_exponent": 2.6341, |
| "huffman_full_ratio": 0.6646, |
| "huffman_size_reduction_pct": 33.54, |
| "palette_element_coverage": 0.999888, |
| "palette_row_fraction": 0.992882, |
| "verbatim_row_fraction": 0.007118, |
| "palette_exponent_ratio": 0.5055, |
| "palette_full_ratio": 0.7528, |
| "palette_size_reduction_pct": 24.72 |
| }, |
| { |
| "name": "model.layers.38.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 38, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.1833, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 4.0329, |
| "unique_exponents": 5, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5265, |
| "huffman_bits_per_exponent": 1.0283, |
| "huffman_full_ratio": 0.5667, |
| "huffman_size_reduction_pct": 43.33, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.38.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 38, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5838, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9686, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987507, |
| "top_16_exponent_coverage": 0.999841, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 240708177, |
| "huffman_bits_per_exponent": 2.6235, |
| "huffman_full_ratio": 0.664, |
| "huffman_size_reduction_pct": 33.6, |
| "palette_element_coverage": 0.999841, |
| "palette_row_fraction": 0.992701, |
| "verbatim_row_fraction": 0.007299, |
| "palette_exponent_ratio": 0.5056, |
| "palette_full_ratio": 0.7528, |
| "palette_size_reduction_pct": 24.72 |
| }, |
| { |
| "name": "model.layers.38.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 38, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5633, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9675, |
| "unique_exponents": 32, |
| "top_8_exponent_coverage": 0.987004, |
| "top_16_exponent_coverage": 0.999913, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478155860, |
| "huffman_bits_per_exponent": 2.6057, |
| "huffman_full_ratio": 0.6629, |
| "huffman_size_reduction_pct": 33.71, |
| "palette_element_coverage": 0.999913, |
| "palette_row_fraction": 0.994472, |
| "verbatim_row_fraction": 0.005528, |
| "palette_exponent_ratio": 0.5047, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.38.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 38, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0696, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 3.6152, |
| "unique_exponents": 3, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5158, |
| "huffman_bits_per_exponent": 1.0074, |
| "huffman_full_ratio": 0.5644, |
| "huffman_size_reduction_pct": 43.56, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.38.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 38, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5688, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9696, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.98689, |
| "top_16_exponent_coverage": 0.999803, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68429719, |
| "huffman_bits_per_exponent": 2.6104, |
| "huffman_full_ratio": 0.6632, |
| "huffman_size_reduction_pct": 33.68, |
| "palette_element_coverage": 0.999803, |
| "palette_row_fraction": 0.989751, |
| "verbatim_row_fraction": 0.010249, |
| "palette_exponent_ratio": 0.5071, |
| "palette_full_ratio": 0.7535, |
| "palette_size_reduction_pct": 24.65 |
| }, |
| { |
| "name": "model.layers.38.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 38, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5797, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9697, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.985228, |
| "top_16_exponent_coverage": 0.999892, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103035034, |
| "huffman_bits_per_exponent": 2.6203, |
| "huffman_full_ratio": 0.6638, |
| "huffman_size_reduction_pct": 33.62, |
| "palette_element_coverage": 0.999892, |
| "palette_row_fraction": 0.9932, |
| "verbatim_row_fraction": 0.0068, |
| "palette_exponent_ratio": 0.5054, |
| "palette_full_ratio": 0.7527, |
| "palette_size_reduction_pct": 24.73 |
| }, |
| { |
| "name": "model.layers.39.input_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 39, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.6285, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.3094, |
| "unique_exponents": 3, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5893, |
| "huffman_bits_per_exponent": 1.151, |
| "huffman_full_ratio": 0.5734, |
| "huffman_size_reduction_pct": 42.66, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.39.mlp.down_proj.weight", |
| "tensor_type": "mlp_down", |
| "category": "mlp", |
| "layer_index": 39, |
| "shape": [ |
| 5120, |
| 17920 |
| ], |
| "n_elements": 91750400, |
| "original_bytes": 183500800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5843, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.969, |
| "unique_exponents": 34, |
| "top_8_exponent_coverage": 0.9872, |
| "top_16_exponent_coverage": 0.999858, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 240465402, |
| "huffman_bits_per_exponent": 2.6209, |
| "huffman_full_ratio": 0.6638, |
| "huffman_size_reduction_pct": 33.62, |
| "palette_element_coverage": 0.999858, |
| "palette_row_fraction": 0.992144, |
| "verbatim_row_fraction": 0.007856, |
| "palette_exponent_ratio": 0.5059, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.layers.39.mlp.gate_up_proj.weight", |
| "tensor_type": "mlp_gate_up_fused", |
| "category": "mlp", |
| "layer_index": 39, |
| "shape": [ |
| 35840, |
| 5120 |
| ], |
| "n_elements": 183500800, |
| "original_bytes": 367001600, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5625, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9675, |
| "unique_exponents": 31, |
| "top_8_exponent_coverage": 0.985997, |
| "top_16_exponent_coverage": 0.999903, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 478737013, |
| "huffman_bits_per_exponent": 2.6089, |
| "huffman_full_ratio": 0.6631, |
| "huffman_size_reduction_pct": 33.69, |
| "palette_element_coverage": 0.999903, |
| "palette_row_fraction": 0.99417, |
| "verbatim_row_fraction": 0.00583, |
| "palette_exponent_ratio": 0.5049, |
| "palette_full_ratio": 0.7524, |
| "palette_size_reduction_pct": 24.76 |
| }, |
| { |
| "name": "model.layers.39.post_attention_layernorm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": 39, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.0895, |
| "sign_entropy_bits": -0.0, |
| "mantissa_entropy_bits": 5.3685, |
| "unique_exponents": 6, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 5196, |
| "huffman_bits_per_exponent": 1.0148, |
| "huffman_full_ratio": 0.5664, |
| "huffman_size_reduction_pct": 43.36, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| }, |
| { |
| "name": "model.layers.39.self_attn.o_proj.weight", |
| "tensor_type": "attn_o", |
| "category": "attention", |
| "layer_index": 39, |
| "shape": [ |
| 5120, |
| 5120 |
| ], |
| "n_elements": 26214400, |
| "original_bytes": 52428800, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5694, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9704, |
| "unique_exponents": 33, |
| "top_8_exponent_coverage": 0.987103, |
| "top_16_exponent_coverage": 0.999697, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 68567775, |
| "huffman_bits_per_exponent": 2.6157, |
| "huffman_full_ratio": 0.6635, |
| "huffman_size_reduction_pct": 33.65, |
| "palette_element_coverage": 0.999697, |
| "palette_row_fraction": 0.988408, |
| "verbatim_row_fraction": 0.011592, |
| "palette_exponent_ratio": 0.5077, |
| "palette_full_ratio": 0.7539, |
| "palette_size_reduction_pct": 24.61 |
| }, |
| { |
| "name": "model.layers.39.self_attn.qkv_proj.weight", |
| "tensor_type": "attn_qkv_fused", |
| "category": "attention", |
| "layer_index": 39, |
| "shape": [ |
| 7680, |
| 5120 |
| ], |
| "n_elements": 39321600, |
| "original_bytes": 78643200, |
| "skipped": false, |
| "exponent_entropy_bits": 2.5787, |
| "sign_entropy_bits": 1.0, |
| "mantissa_entropy_bits": 6.9698, |
| "unique_exponents": 30, |
| "top_8_exponent_coverage": 0.984413, |
| "top_16_exponent_coverage": 0.999876, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 103099116, |
| "huffman_bits_per_exponent": 2.6219, |
| "huffman_full_ratio": 0.6639, |
| "huffman_size_reduction_pct": 33.61, |
| "palette_element_coverage": 0.999876, |
| "palette_row_fraction": 0.992355, |
| "verbatim_row_fraction": 0.007645, |
| "palette_exponent_ratio": 0.5058, |
| "palette_full_ratio": 0.7529, |
| "palette_size_reduction_pct": 24.71 |
| }, |
| { |
| "name": "model.norm.weight", |
| "tensor_type": "layernorm", |
| "category": "layernorm", |
| "layer_index": -1, |
| "shape": [ |
| 5120 |
| ], |
| "n_elements": 5120, |
| "original_bytes": 10240, |
| "skipped": false, |
| "exponent_entropy_bits": 0.8348, |
| "sign_entropy_bits": 0.0, |
| "mantissa_entropy_bits": 4.7541, |
| "unique_exponents": 8, |
| "top_8_exponent_coverage": 1.0, |
| "top_16_exponent_coverage": 1.0, |
| "top_32_exponent_coverage": 1.0, |
| "huffman_exponent_bits": 6422, |
| "huffman_bits_per_exponent": 1.2543, |
| "huffman_full_ratio": 0.5823, |
| "huffman_size_reduction_pct": 41.77, |
| "palette_element_coverage": 1.0, |
| "palette_row_fraction": 1.0, |
| "verbatim_row_fraction": 0.0, |
| "palette_exponent_ratio": 0.5051, |
| "palette_full_ratio": 0.7525, |
| "palette_size_reduction_pct": 24.75 |
| } |
| ] |
| } |