ishangarg183 committed
Commit fea5b57 · verified · 1 parent: d9ff936

Add assembled activations for llama32-3b-grpo
Files changed (33)
  1. .gitattributes +2 -0
  2. v1/assembled_activations/llama32-3b-grpo/activations/activations.pt +3 -0
  3. v1/assembled_activations/llama32-3b-grpo/checkpoints/final.pt +3 -0
  4. v1/assembled_activations/llama32-3b-grpo/features/counterfactual_scores.csv +0 -0
  5. v1/assembled_activations/llama32-3b-grpo/features/counterfactual_scores_by_layer.csv +0 -0
  6. v1/assembled_activations/llama32-3b-grpo/features/cross_layer_cosine_drift.csv +3 -0
  7. v1/assembled_activations/llama32-3b-grpo/features/decoder_layer_profiles.csv +3 -0
  8. v1/assembled_activations/llama32-3b-grpo/features/feature_activations.pt +3 -0
  9. v1/assembled_activations/llama32-3b-grpo/features/feature_classification.csv +0 -0
  10. v1/assembled_activations/llama32-3b-grpo/features/merged_classification.csv +0 -0
  11. v1/assembled_activations/llama32-3b-grpo/features/model_layer_stream_patterns.csv +0 -0
  12. v1/assembled_activations/llama32-3b-grpo/features/superposition_analysis.json +0 -0
  13. v1/assembled_activations/llama32-3b-grpo/metrics/aggregate_metrics.json +388 -0
  14. v1/assembled_activations/llama32-3b-grpo/metrics/training_metrics.json +524 -0
  15. v1/assembled_activations/llama32-3b-grpo/plots/aligned_decoder_norm_heatmap.png +3 -0
  16. v1/assembled_activations/llama32-3b-grpo/plots/base_decoder_norm_heatmap.png +3 -0
  17. v1/assembled_activations/llama32-3b-grpo/plots/cf_shift_by_layer.png +3 -0
  18. v1/assembled_activations/llama32-3b-grpo/plots/cf_shift_p95_by_layer.png +3 -0
  19. v1/assembled_activations/llama32-3b-grpo/plots/class_distribution_multilayer.png +3 -0
  20. v1/assembled_activations/llama32-3b-grpo/plots/class_distribution_primary.png +3 -0
  21. v1/assembled_activations/llama32-3b-grpo/plots/cross_layer_cosine_drift_by_stream.png +3 -0
  22. v1/assembled_activations/llama32-3b-grpo/plots/decoder_norm_ratio_by_layer.png +3 -0
  23. v1/assembled_activations/llama32-3b-grpo/plots/feature_layer_trajectories.png +3 -0
  24. v1/assembled_activations/llama32-3b-grpo/plots/feature_sharing_ratio_by_layer.png +3 -0
  25. v1/assembled_activations/llama32-3b-grpo/plots/fve_by_layer.png +3 -0
  26. v1/assembled_activations/llama32-3b-grpo/plots/l0_by_layer.png +3 -0
  27. v1/assembled_activations/llama32-3b-grpo/plots/layer_concentration_entropy.png +3 -0
  28. v1/assembled_activations/llama32-3b-grpo/plots/loss_curves.png +3 -0
  29. v1/assembled_activations/llama32-3b-grpo/plots/max_norm_layer_migration.png +3 -0
  30. v1/assembled_activations/llama32-3b-grpo/plots/rho_histogram_by_layer.png +3 -0
  31. v1/assembled_activations/llama32-3b-grpo/plots/rho_theta_scatter_by_layer.png +3 -0
  32. v1/assembled_activations/llama32-3b-grpo/plots/superposition_by_layer.png +3 -0
  33. v1/assembled_activations/llama32-3b-grpo/plots/theta_by_layer.png +3 -0
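Every large file in this commit is stored via Git LFS (see the .gitattributes change below), so the diff entries are pointer stubs rather than payloads. A minimal sketch of fetching just this subtree with huggingface_hub; the repo id and repo type are placeholders, since the commit page does not name them:

# Sketch: download only the files added by this commit.
# "user/repo" and repo_type="dataset" are assumptions; substitute the real repo.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="user/repo",                    # placeholder repo id
    repo_type="dataset",                    # assumed; may be "model"
    revision="fea5b57",                     # the commit shown above
    allow_patterns=["v1/assembled_activations/llama32-3b-grpo/**"],
)
print(local_dir)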
.gitattributes CHANGED
@@ -74,3 +74,5 @@ v1/assembled_activations/qwen3-4b-ppo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
  v1/assembled_activations/qwen3-4b-simpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
  v1/assembled_activations/llama32-3b-dpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
  v1/assembled_activations/llama32-3b-dpo/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
+ v1/assembled_activations/llama32-3b-grpo/features/cross_layer_cosine_drift.csv filter=lfs diff=lfs merge=lfs -text
+ v1/assembled_activations/llama32-3b-grpo/features/decoder_layer_profiles.csv filter=lfs diff=lfs merge=lfs -text
v1/assembled_activations/llama32-3b-grpo/activations/activations.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50e648e9f1d3b6ec8529c65495cd95eeb1c436456b22bbd8b259849b01bdb146
+ size 4496133925
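Each ADDED entry here is a Git LFS pointer rather than the payload: three lines giving the spec version, the sha256 oid of the real object, and its size in bytes (activations.pt is about 4.5 GB). A small sketch, assuming a local checkout, that parses a pointer file to tell whether the payload still needs a `git lfs pull`:

# Sketch: read a Git LFS pointer stub and report the object it stands for.
# Real pointer files begin with "version https://git-lfs.github.com/spec/v1".
from pathlib import Path

def read_lfs_pointer(path: str) -> dict | None:
    with Path(path).open("rb") as f:
        head = f.read(256)  # pointer files are ~130 bytes
    if not head.startswith(b"version https://git-lfs.github.com/spec/v1"):
        return None  # the real payload is already present
    fields = dict(line.split(" ", 1) for line in head.decode().strip().splitlines())
    return {"oid": fields["oid"], "size_bytes": int(fields["size"])}

print(read_lfs_pointer(
    "v1/assembled_activations/llama32-3b-grpo/activations/activations.pt"))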
v1/assembled_activations/llama32-3b-grpo/checkpoints/final.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab5c226343b2042cb69e8cba11d4f0ff6753f3d104d3d8a1a036e8e217e58153
+ size 10873439922
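activations.pt (~4.5 GB) and checkpoints/final.pt (~10.9 GB) are PyTorch serializations whose internal layout this commit does not document, so the sketch below only loads one on CPU and prints its top-level structure. weights_only=False is an assumption, in case the checkpoint pickles more than plain tensors:

# Sketch: inspect a .pt file from this commit after `git lfs pull`.
import torch

obj = torch.load(
    "v1/assembled_activations/llama32-3b-grpo/checkpoints/final.pt",
    map_location="cpu",    # inspect without needing a GPU
    weights_only=False,    # assumed: may contain arbitrary pickled objects
)
if isinstance(obj, dict):
    for key, val in obj.items():
        desc = tuple(val.shape) if torch.is_tensor(val) else type(val).__name__
        print(key, desc)
else:
    print(type(obj))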
v1/assembled_activations/llama32-3b-grpo/features/counterfactual_scores.csv ADDED
The diff for this file is too large to render. See raw diff
 
v1/assembled_activations/llama32-3b-grpo/features/counterfactual_scores_by_layer.csv ADDED
The diff for this file is too large to render. See raw diff
 
v1/assembled_activations/llama32-3b-grpo/features/cross_layer_cosine_drift.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da7045e64bece542f6164c96fe5d4646c516a3f92c58101857158273d5197575
+ size 27244726
v1/assembled_activations/llama32-3b-grpo/features/decoder_layer_profiles.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7e8c1f429a3bfda239e53e7c0638b75f2511ed00bf7556ed10bed296bb20867
+ size 13037100
v1/assembled_activations/llama32-3b-grpo/features/feature_activations.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac866d6ddac1c7d94a4840df5b0091f2bbb97a96187f6546419fba5a9cd7ce81
+ size 35935153173
v1/assembled_activations/llama32-3b-grpo/features/feature_classification.csv ADDED
The diff for this file is too large to render. See raw diff
 
v1/assembled_activations/llama32-3b-grpo/features/merged_classification.csv ADDED
The diff for this file is too large to render. See raw diff
 
v1/assembled_activations/llama32-3b-grpo/features/model_layer_stream_patterns.csv ADDED
The diff for this file is too large to render. See raw diff
 
v1/assembled_activations/llama32-3b-grpo/features/superposition_analysis.json ADDED
The diff for this file is too large to render. See raw diff
 
v1/assembled_activations/llama32-3b-grpo/metrics/aggregate_metrics.json ADDED
@@ -0,0 +1,388 @@
+ {
+   "crosscoder_kind": "multilayer_sparc",
+   "layers": [
+     12,
+     13,
+     14
+   ],
+   "topk_mode": "model_balanced_layer_agg",
+   "topk": 400,
+   "expansion_factor": 8,
+   "dict_size": 24576,
+   "forced_shared_fraction": 0.06,
+   "class_counts": {
+     "aligned_only": 6084,
+     "shared_redirected": 5805,
+     "shared_intermediate": 4922,
+     "base_only": 3467,
+     "shared_attenuated": 2812,
+     "shared_aligned": 1486
+   },
+   "multilayer_class_counts": {
+     "drifting_or_rotating": 12498,
+     "persistent_aligned_only": 4582,
+     "persistent_base_only": 2198,
+     "persistent_shared": 2170,
+     "localized_aligned_only": 1502,
+     "localized_base_only": 1269,
+     "mixed_or_ambiguous": 357
+   },
+   "classification_thresholds": {
+     "rho_base_only": 0.4,
+     "rho_aligned_only": 0.5552206029447704,
+     "rho_shared_low": 0.44024917884104786,
+     "rho_shared_high": 0.5552206029447704
+   },
+   "threshold_sensitivity": {
+     "original": {
+       "aligned_only": 6084,
+       "shared_redirected": 5805,
+       "shared_intermediate": 4922,
+       "base_only": 3467,
+       "shared_attenuated": 2812,
+       "shared_aligned": 1486
+     },
+     "perturbed": {
+       "delta_-0.05": {
+         "base_only": 878,
+         "aligned_only": 2725,
+         "shared_aligned": 1499,
+         "shared_redirected": 9722,
+         "shared_intermediate": 7743,
+         "shared_attenuated": 2009,
+         "other": 0
+       },
+       "delta_+0.05": {
+         "base_only": 7128,
+         "aligned_only": 10726,
+         "shared_aligned": 1475,
+         "shared_redirected": 771,
+         "shared_intermediate": 646,
+         "shared_attenuated": 3830,
+         "other": 0
+       }
+     },
+     "perturbation": 0.05
+   },
+   "class_counts_by_layer": {
+     "12": {
+       "shared_redirected": 8317,
+       "shared_intermediate": 6544,
+       "base_only": 4585,
+       "shared_aligned": 2822,
+       "aligned_only": 2308
+     },
+     "13": {
+       "shared_redirected": 9393,
+       "shared_intermediate": 8081,
+       "base_only": 3041,
+       "shared_aligned": 2669,
+       "aligned_only": 1392
+     },
+     "14": {
+       "shared_redirected": 9247,
+       "base_only": 4609,
+       "shared_intermediate": 4229,
+       "aligned_only": 2292,
+       "shared_aligned": 2107,
+       "shared_attenuated": 2092
+     }
+   },
+   "feature_sharing_ratio_by_layer": {
+     "12": 0.7195231119791666,
+     "13": 0.8196207682291666,
+     "14": 0.7191975911458334
+   },
+   "decoder_amplification_by_layer": {
+     "12": {
+       "median": 0.9999999814920351,
+       "p95": 4.761198471404814
+     },
+     "13": {
+       "median": 0.9999999845770899,
+       "p95": 2.7799582920911448
+     },
+     "14": {
+       "median": 0.9999999870003597,
+       "p95": 1.9990551070509737
+     }
+   },
+   "classification_thresholds_by_layer": {
+     "12": {
+       "rho_base_only": 0.3232246755639113,
+       "rho_aligned_only": 0.7496195037077811,
+       "rho_shared_low": 0.3232246755639113,
+       "rho_shared_high": 0.7496195037077811
+     },
+     "13": {
+       "rho_base_only": 0.33013993411090486,
+       "rho_aligned_only": 0.7235388298969397,
+       "rho_shared_low": 0.33013993411090486,
+       "rho_shared_high": 0.7235388298969397
+     },
+     "14": {
+       "rho_base_only": 0.4,
+       "rho_aligned_only": 0.6325617662750056,
+       "rho_shared_low": 0.43268800458718787,
+       "rho_shared_high": 0.6325617662750056
+     }
+   },
+   "threshold_sensitivity_by_layer": {
+     "12": {
+       "original": {
+         "shared_redirected": 8317,
+         "shared_intermediate": 6544,
+         "base_only": 4585,
+         "shared_aligned": 2822,
+         "aligned_only": 2308
+       },
+       "perturbed": {
+         "delta_-0.05": {
+           "base_only": 3098,
+           "aligned_only": 1560,
+           "shared_aligned": 3403,
+           "shared_redirected": 9201,
+           "shared_intermediate": 7314,
+           "shared_attenuated": 0,
+           "other": 0
+         },
+         "delta_+0.05": {
+           "base_only": 6526,
+           "aligned_only": 3453,
+           "shared_aligned": 2068,
+           "shared_redirected": 7069,
+           "shared_intermediate": 5460,
+           "shared_attenuated": 0,
+           "other": 0
+         }
+       },
+       "perturbation": 0.05
+     },
+     "13": {
+       "original": {
+         "shared_redirected": 9393,
+         "shared_intermediate": 8081,
+         "base_only": 3041,
+         "shared_aligned": 2669,
+         "aligned_only": 1392
+       },
+       "perturbed": {
+         "delta_-0.05": {
+           "base_only": 1800,
+           "aligned_only": 845,
+           "shared_aligned": 3022,
+           "shared_redirected": 10094,
+           "shared_intermediate": 8815,
+           "shared_attenuated": 0,
+           "other": 0
+         },
+         "delta_+0.05": {
+           "base_only": 5121,
+           "aligned_only": 2474,
+           "shared_aligned": 2031,
+           "shared_redirected": 8218,
+           "shared_intermediate": 6732,
+           "shared_attenuated": 0,
+           "other": 0
+         }
+       },
+       "perturbation": 0.05
+     },
+     "14": {
+       "original": {
+         "shared_redirected": 9247,
+         "base_only": 4609,
+         "shared_intermediate": 4229,
+         "aligned_only": 2292,
+         "shared_aligned": 2107,
+         "shared_attenuated": 2092
+       },
+       "perturbed": {
+         "delta_-0.05": {
+           "base_only": 2008,
+           "aligned_only": 921,
+           "shared_aligned": 2965,
+           "shared_redirected": 11424,
+           "shared_intermediate": 5725,
+           "shared_attenuated": 1533,
+           "other": 0
+         },
+         "delta_+0.05": {
+           "base_only": 7950,
+           "aligned_only": 5052,
+           "shared_aligned": 1379,
+           "shared_redirected": 5406,
+           "shared_intermediate": 2165,
+           "shared_attenuated": 2624,
+           "other": 0
+         }
+       },
+       "perturbation": 0.05
+     }
+   },
+   "counterfactual_shift_by_layer": {
+     "12": {
+       "aligned_only": {
+         "mean_shift": 2.2766621525107953e-06,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 6084
+       },
+       "base_only": {
+         "mean_shift": -2.568851394574549e-06,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 3467
+       },
+       "shared_aligned": {
+         "mean_shift": 0.00018095357842273262,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.09195232205092907,
+         "count": 1486
+       },
+       "shared_attenuated": {
+         "mean_shift": -1.0541640883041274e-07,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 2812
+       },
+       "shared_intermediate": {
+         "mean_shift": 2.315150216625542e-08,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 4922
+       },
+       "shared_redirected": {
+         "mean_shift": -1.3746285198566602e-07,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 5805
+       }
+     },
+     "13": {
+       "aligned_only": {
+         "mean_shift": 7.409373586140498e-06,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 6084
+       },
+       "base_only": {
+         "mean_shift": -1.1053492490492892e-05,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 3467
+       },
+       "shared_aligned": {
+         "mean_shift": 0.00019484710632688615,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.09973335079848766,
+         "count": 1486
+       },
+       "shared_attenuated": {
+         "mean_shift": -1.985480392448661e-06,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 2812
+       },
+       "shared_intermediate": {
+         "mean_shift": 3.9290498327251884e-08,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 4922
+       },
+       "shared_redirected": {
+         "mean_shift": -1.7551452070176433e-07,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 5805
+       }
+     },
+     "14": {
+       "aligned_only": {
+         "mean_shift": 3.364509558595365e-05,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 6084
+       },
+       "base_only": {
+         "mean_shift": -3.5504677135829413e-05,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 3467
+       },
+       "shared_aligned": {
+         "mean_shift": 0.0003855111859048331,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.11618590541183949,
+         "count": 1486
+       },
+       "shared_attenuated": {
+         "mean_shift": -9.63817584883231e-06,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 2812
+       },
+       "shared_intermediate": {
+         "mean_shift": 2.184999210256547e-07,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 4922
+       },
+       "shared_redirected": {
+         "mean_shift": 4.39443286374644e-07,
+         "median_shift": 0.0,
+         "p95_abs_shift": 0.0,
+         "count": 5805
+       }
+     }
+   },
+   "total_features": 24576,
+   "fve_base": 0.6947082683678073,
+   "fve_aligned": 0.6870226214069346,
+   "fve_base_by_layer": [
+     0.6774334142969541,
+     0.6886444531810221,
+     0.7180468839500587
+   ],
+   "fve_aligned_by_layer": [
+     0.6701876355715447,
+     0.6808480033699754,
+     0.7100321675470362
+   ],
+   "val_fve_base_by_layer": [
+     0.6774334142969541,
+     0.6886444531810221,
+     0.7180468839500587
+   ],
+   "val_fve_aligned_by_layer": [
+     0.6701876355715447,
+     0.6808480033699754,
+     0.7100321675470362
+   ],
+   "dead_neuron_fraction": 0.9609958831845009,
+   "l0_sparsity_base": 77.30068727438727,
+   "l0_sparsity_aligned": 76.21462362437518,
+   "l0_base_by_layer": [
+     65.64209354932866,
+     76.70472124927029,
+     89.5552393461763
+   ],
+   "l0_aligned_by_layer": [
+     64.81115002918855,
+     75.33169147694105,
+     88.50102159953299
+   ],
+   "val_l0_base": 78.84989721487953,
+   "val_l0_aligned": 77.61215010238567,
+   "val_l0_base_by_layer": [
+     66.79128493064361,
+     78.28724914071448,
+     91.47114966427469
+   ],
+   "val_l0_aligned_by_layer": [
+     65.80791884816755,
+     76.6645397166307,
+     90.36398343390819
+   ],
+   "superposition_fraction": 0.0
+ }
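The aggregate metrics are plain JSON, so the headline numbers can be recomputed without special tooling. A minimal sketch (the path assumes a local checkout; field names as in the file) that prints the class distribution as fractions of total_features and the per-layer feature-sharing ratios:

# Sketch: summarize aggregate_metrics.json from a local checkout.
import json

with open("v1/assembled_activations/llama32-3b-grpo/metrics/aggregate_metrics.json") as f:
    m = json.load(f)

total = m["total_features"]  # 24576; class_counts sums to the same total
for cls, n in sorted(m["class_counts"].items(), key=lambda kv: -kv[1]):
    print(f"{cls:>20}: {n:5d} ({n / total:.1%})")
for layer, ratio in m["feature_sharing_ratio_by_layer"].items():
    print(f"layer {layer}: sharing ratio {ratio:.3f}")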
v1/assembled_activations/llama32-3b-grpo/metrics/training_metrics.json ADDED
@@ -0,0 +1,524 @@
+ {
+   "epochs": [
+     1,
+     2,
+     3,
+     4,
+     5,
+     6,
+     7,
+     8
+   ],
+   "train_loss": [
+     0.07674359122668521,
+     0.04322117879786689,
+     0.041181389142373674,
+     0.040074929835299825,
+     0.0392595374999134,
+     0.03860885386235068,
+     0.038139252809800045,
+     0.03787409050974036
+   ],
+   "val_loss": [
+     0.045276696184230726,
+     0.042513850998347966,
+     0.04139913450829021,
+     0.04075757585738966,
+     0.04028706457368366,
+     0.0400088208079026,
+     0.039891897734859225,
+     0.03987071945951247
+   ],
+   "train_fve_base": [
+     0.5218191150209165,
+     0.6581393129562538,
+     0.6788746238589913,
+     0.6907193261979967,
+     0.6996899498740763,
+     0.7067874284146894,
+     0.7117742059248898,
+     0.7144628610794682
+   ],
+   "train_fve_aligned": [
+     0.5172356182036669,
+     0.6521651021444651,
+     0.6710452417915354,
+     0.6816653398067298,
+     0.6897432762005145,
+     0.6963027055612866,
+     0.7009537841100408,
+     0.7034451776753374
+   ],
+   "val_fve_base": [
+     0.6393531760620197,
+     0.6663743301211852,
+     0.6773919553656853,
+     0.6840992772766433,
+     0.6891960114708746,
+     0.6927146209472137,
+     0.6943349950600669,
+     0.6947082683678073
+   ],
+   "val_fve_aligned": [
+     0.634585750664716,
+     0.660018102348787,
+     0.6706369740800707,
+     0.6769087074314737,
+     0.68184863584828,
+     0.6851667025326434,
+     0.6866701738372523,
+     0.6870226214069346
+   ],
+   "val_fve_base_by_layer": [
+     [
+       0.6272152599863982,
+       0.6332998275756836,
+       0.6575443794590017
+     ],
+     [
+       0.6521256428738539,
+       0.6600078350586417,
+       0.686989458755673
+     ],
+     [
+       0.6621782736004335,
+       0.6711501657026601,
+       0.6988473628203906
+     ],
+     [
+       0.6676703444950244,
+       0.6779192161809712,
+       0.7067082190388785
+     ],
+     [
+       0.6722474247997344,
+       0.6831871764198023,
+       0.7121533692195153
+     ],
+     [
+       0.6756168560207826,
+       0.6865271073985474,
+       0.7159998494917186
+     ],
+     [
+       0.6770371470151771,
+       0.6882066729805232,
+       0.7177611193107685
+     ],
+     [
+       0.6774334142969541,
+       0.6886444531810221,
+       0.7180468839500587
+     ]
+   ],
+   "val_fve_aligned_by_layer": [
+     [
+       0.6233792255062083,
+       0.6277059453944261,
+       0.6526720202406039
+     ],
+     [
+       0.6468782418685434,
+       0.653206247933872,
+       0.679969761072029
+     ],
+     [
+       0.6562783268733798,
+       0.6638215808344137,
+       0.6918109536795092
+     ],
+     [
+       0.6612628511109278,
+       0.6705732148979346,
+       0.6988899854465305
+     ],
+     [
+       0.6654992702743769,
+       0.6759732140920549,
+       0.7040733623254986
+     ],
+     [
+       0.6685321296696888,
+       0.6789075569956714,
+       0.7080603703778452
+     ],
+     [
+       0.6698048367550236,
+       0.6804219499932533,
+       0.7097836757829676
+     ],
+     [
+       0.6701876355715447,
+       0.6808480033699754,
+       0.7100321675470362
+     ]
+   ],
+   "train_fve_base_by_layer": [
+     [
+       0.513117791544396,
+       0.5155797629278442,
+       0.5367597415999787
+     ],
+     [
+       0.6441039162216587,
+       0.6522654140823707,
+       0.6780485487513144
+     ],
+     [
+       0.6628799028933848,
+       0.6730503911381914,
+       0.7006935184626849
+     ],
+     [
+       0.6733515275478085,
+       0.685189020821593,
+       0.7136173695760795
+     ],
+     [
+       0.6809947529663068,
+       0.6943080470148612,
+       0.7237669889577564
+     ],
+     [
+       0.6869359059912797,
+       0.7013317935312274,
+       0.7320945297704409
+     ],
+     [
+       0.691114535056146,
+       0.7062968335789126,
+       0.7379111921794221
+     ],
+     [
+       0.6934602800267382,
+       0.7089756050794638,
+       0.74095263758808
+     ]
+   ],
+   "train_fve_aligned_by_layer": [
+     [
+       0.5089469564914425,
+       0.510429343701244,
+       0.5323305079203493
+     ],
+     [
+       0.639565573195164,
+       0.6456975710719076,
+       0.6712321020745424
+     ],
+     [
+       0.6564315080433928,
+       0.664647905400672,
+       0.6920562531957832
+     ],
+     [
+       0.6654149732322493,
+       0.6757753993500344,
+       0.7038055851455328
+     ],
+     [
+       0.6721183056032456,
+       0.6841006580668447,
+       0.7130108071013772
+     ],
+     [
+       0.6774491685841143,
+       0.6906084561306207,
+       0.7208504371662552
+     ],
+     [
+       0.6813007789749328,
+       0.6952133505092472,
+       0.726347162510593
+     ],
+     [
+       0.6834543206437039,
+       0.6976807647049253,
+       0.729200392039422
+     ]
+   ],
+   "dead_neurons": [
+     0.8841513978133616,
+     0.9568707898076486,
+     0.9615639534110711,
+     0.9622459215026673,
+     0.961871207317134,
+     0.9613948494665677,
+     0.9610488299939345,
+     0.9609958831845009
+   ],
+   "l0_base": [
+     121.136466098124,
+     72.97896241891196,
+     72.2001569606516,
+     73.32550964979004,
+     74.82235363249325,
+     76.16007140917313,
+     77.00653347409543,
+     77.30068727438727
+   ],
+   "l0_aligned": [
+     118.93421989481003,
+     71.53167821925354,
+     71.0108692305031,
+     72.31692343431818,
+     73.82103408127588,
+     75.11973643929235,
+     75.93497887481672,
+     76.21462362437518
+   ],
+   "l0_base_by_layer": [
+     [
+       111.43872227086982,
+       118.30589608873322,
+       133.66476941039113
+     ],
+     [
+       62.302958990075894,
+       71.08327860478693,
+       85.55064214827787
+     ],
+     [
+       61.694559982486865,
+       70.65915061295972,
+       84.24675277291301
+     ],
+     [
+       62.596194541739635,
+       72.23266929363689,
+       85.14765761821366
+     ],
+     [
+       63.748741243432576,
+       74.00266345592527,
+       86.71564871570344
+     ],
+     [
+       64.77014010507881,
+       75.4961325160537,
+       88.21393388791594
+     ],
+     [
+       65.40331290134267,
+       76.38590922358435,
+       89.23037069468768
+     ],
+     [
+       65.64209354932866,
+       76.70472124927029,
+       89.5552393461763
+     ]
+   ],
+   "l0_aligned_by_layer": [
+     [
+       109.56592965557502,
+       115.97254451255108,
+       131.26417469352015
+     ],
+     [
+       61.350700525394046,
+       69.53880253940456,
+       83.70552393461763
+     ],
+     [
+       60.85610040863981,
+       69.35692133683597,
+       82.81957822533568
+     ],
+     [
+       61.81104057209574,
+       71.03205268534734,
+       84.10766929363689
+     ],
+     [
+       62.96254743140689,
+       72.73571584938703,
+       85.76483143607706
+     ],
+     [
+       63.978528166958554,
+       74.16905647985989,
+       87.21161704611792
+     ],
+     [
+       64.58822241681261,
+       75.03416885580852,
+       88.18253794512552
+     ],
+     [
+       64.81115002918855,
+       75.33169147694105,
+       88.50102159953299
+     ]
+   ],
+   "val_l0_base": [
+     76.48782280727207,
+     73.48593175598464,
+     73.73878610820671,
+     75.04906807145524,
+     76.41334242096747,
+     77.9022164469614,
+     78.6846667384602,
+     78.84989721487953
+   ],
+   "val_l0_aligned": [
+     75.0534313261821,
+     72.0293619844926,
+     72.63701885283305,
+     73.83349936919687,
+     75.38107467571479,
+     76.78896056170238,
+     77.48427745559452,
+     77.61215010238567
+   ],
+   "val_l0_base_by_layer": [
+     [
+       65.50414485831536,
+       74.47414921465969,
+       89.48516582069597
+     ],
+     [
+       63.45653360177085,
+       71.41803010471205,
+       85.58322427160452
+     ],
+     [
+       63.403250449615,
+       72.4886562087773,
+       85.32444371727749
+     ],
+     [
+       63.711605611271885,
+       74.16361256544502,
+       87.2719786479211
+     ],
+     [
+       64.96951353862023,
+       75.69464442867259,
+       88.5758617061595
+     ],
+     [
+       66.02416013547888,
+       77.07302576833995,
+       90.60945680628272
+     ],
+     [
+       66.63781634425618,
+       78.01167105629806,
+       91.40450482593157
+     ],
+     [
+       66.79128493064361,
+       78.28724914071448,
+       91.47114966427469
+     ]
+   ],
+   "val_l0_aligned_by_layer": [
+     [
+       64.29243019862949,
+       73.13732550655985,
+       87.73053010471205
+     ],
+     [
+       61.9127945026178,
+       70.0345222513089,
+       84.14076137043419
+     ],
+     [
+       62.49950916230367,
+       70.98451137043419,
+       84.42702879581152
+     ],
+     [
+       62.823243905112385,
+       72.5755890052356,
+       86.10165796729282
+     ],
+     [
+       64.12390925622111,
+       74.49552793652599,
+       87.52377838614099
+     ],
+     [
+       65.08524214659685,
+       75.61262000917765,
+       89.66901178010471
+     ],
+     [
+       65.6529777486911,
+       76.40794068980591,
+       90.3919066583923
+     ],
+     [
+       65.80791884816755,
+       76.6645397166307,
+       90.36398343390819
+     ]
+   ],
+   "self_recon": [
+     0.03779663238511299,
+     0.027047466899888666,
+     0.02546192713786445,
+     0.02456349793710663,
+     0.023879754270594925,
+     0.023328691720962524,
+     0.02293838971385257,
+     0.022728793613364735
+   ],
+   "cross_recon": [
+     0.03785394466065101,
+     0.027076600934990442,
+     0.025491758700410178,
+     0.0246022383432881,
+     0.023943013571946507,
+     0.02341594385669305,
+     0.02303808376897655,
+     0.02283220853215063
+   ],
+   "sparsity": [
+     0.02380538061810293,
+     0.005343071211149,
+     0.005522758250330807,
+     0.005670536276316232,
+     0.005802577443560069,
+     0.005913784346878703,
+     0.005985629326738395,
+     0.006012413198138129
+   ],
+   "val_self_recon": [
+     0.028536553528764486,
+     0.02644324319251857,
+     0.02557803145292854,
+     0.025054661449805605,
+     0.0246538199016249,
+     0.02437961772474319,
+     0.024254350498786773,
+     0.024226347998720813
+   ],
+   "val_cross_recon": [
+     0.028564947554890398,
+     0.02647020462997921,
+     0.0256076222236868,
+     0.02508811676065335,
+     0.024693033419042356,
+     0.024424381593135015,
+     0.024301334952262683,
+     0.02427353766498141
+   ],
+   "val_sparsity": [
+     0.005314163556468737,
+     0.0054825257890277505,
+     0.005578053820176118,
+     0.005667667375165599,
+     0.005756030966045033,
+     0.005859450251839236,
+     0.00591701309620866,
+     0.005934955881352668
+   ],
+   "layers": [
+     12,
+     13,
+     14
+   ],
+   "topk_mode": "model_balanced_layer_agg"
+ }
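training_metrics.json stores one value per epoch for each series, so the curves in plots/loss_curves.png can be regenerated from it directly. A sketch assuming matplotlib is installed, using only keys present in the file:

# Sketch: re-plot train/val loss from training_metrics.json.
import json
import matplotlib.pyplot as plt

with open("v1/assembled_activations/llama32-3b-grpo/metrics/training_metrics.json") as f:
    tm = json.load(f)

plt.plot(tm["epochs"], tm["train_loss"], marker="o", label="train loss")
plt.plot(tm["epochs"], tm["val_loss"], marker="o", label="val loss")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curves_replot.png", dpi=150)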
v1/assembled_activations/llama32-3b-grpo/plots/aligned_decoder_norm_heatmap.png ADDED

Git LFS Details

  • SHA256: f179fead9df779fc48f1573c2c8935981a29cbfff6f8d8d718ff48aa8df82c89
  • Pointer size: 131 Bytes
  • Size of remote file: 188 kB
v1/assembled_activations/llama32-3b-grpo/plots/base_decoder_norm_heatmap.png ADDED

Git LFS Details

  • SHA256: bfd400890bbf5fa81872f9292127c79bbe7e37c53425f8f94ddab09c91ffa1af
  • Pointer size: 131 Bytes
  • Size of remote file: 187 kB
v1/assembled_activations/llama32-3b-grpo/plots/cf_shift_by_layer.png ADDED

Git LFS Details

  • SHA256: 5bb386efba3e01de7132c86769c5ab2467c86e965676e39b6804d118f6f6e947
  • Pointer size: 130 Bytes
  • Size of remote file: 44 kB
v1/assembled_activations/llama32-3b-grpo/plots/cf_shift_p95_by_layer.png ADDED

Git LFS Details

  • SHA256: 2f3b143076d09c6c52194fecb5886c0e0f1e098b84123d39c5c86d8f8cb074f5
  • Pointer size: 131 Bytes
  • Size of remote file: 115 kB
v1/assembled_activations/llama32-3b-grpo/plots/class_distribution_multilayer.png ADDED

Git LFS Details

  • SHA256: 7c559df4d6cd1e545ff36ab23fd197f9fa87447acfb963436384f9f623a306b9
  • Pointer size: 130 Bytes
  • Size of remote file: 83.8 kB
v1/assembled_activations/llama32-3b-grpo/plots/class_distribution_primary.png ADDED

Git LFS Details

  • SHA256: b2c54e345977052040bd492700c2efbfc3faa119ba84c6cdf0c4909996b99c1a
  • Pointer size: 131 Bytes
  • Size of remote file: 146 kB
v1/assembled_activations/llama32-3b-grpo/plots/cross_layer_cosine_drift_by_stream.png ADDED

Git LFS Details

  • SHA256: dba3d52fd2c05d42436711a636ec7fec8017f8330e5f53518818204a15237b9d
  • Pointer size: 130 Bytes
  • Size of remote file: 54.8 kB
v1/assembled_activations/llama32-3b-grpo/plots/decoder_norm_ratio_by_layer.png ADDED

Git LFS Details

  • SHA256: 2515fba00de16bea47142354e608726d523e141e83683d72869fbf00e3503fb7
  • Pointer size: 130 Bytes
  • Size of remote file: 48.3 kB
v1/assembled_activations/llama32-3b-grpo/plots/feature_layer_trajectories.png ADDED

Git LFS Details

  • SHA256: 4a73b77474e8fbf6a78d6cbd0695a5d67f990f1cc292e6031ee8de43627f3547
  • Pointer size: 131 Bytes
  • Size of remote file: 546 kB
v1/assembled_activations/llama32-3b-grpo/plots/feature_sharing_ratio_by_layer.png ADDED

Git LFS Details

  • SHA256: dcafa9ca1fa1e781a7d60ecf8502db14008eac8b7c2678709237c2f5b3a9b06e
  • Pointer size: 130 Bytes
  • Size of remote file: 57.6 kB
v1/assembled_activations/llama32-3b-grpo/plots/fve_by_layer.png ADDED

Git LFS Details

  • SHA256: 93571c4873c5d7f84f476cd5c6582db96691d24abb6e345facc4d486c30b28ef
  • Pointer size: 131 Bytes
  • Size of remote file: 104 kB
v1/assembled_activations/llama32-3b-grpo/plots/l0_by_layer.png ADDED

Git LFS Details

  • SHA256: c2fc980a2f0f5dcc8ba4a644636fb766d284cf60df2ebf64917bfac75ee4d897
  • Pointer size: 130 Bytes
  • Size of remote file: 92.5 kB
v1/assembled_activations/llama32-3b-grpo/plots/layer_concentration_entropy.png ADDED

Git LFS Details

  • SHA256: 92f0be3690bcd9243a1f980183b136cd5cbddd0f6e2b504eb3bfa2bac984c79e
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
v1/assembled_activations/llama32-3b-grpo/plots/loss_curves.png ADDED

Git LFS Details

  • SHA256: 739a40dc8fd23f021c9b72ea77bfad3db88f584c768cd77d7af59920e72f520e
  • Pointer size: 131 Bytes
  • Size of remote file: 323 kB
v1/assembled_activations/llama32-3b-grpo/plots/max_norm_layer_migration.png ADDED

Git LFS Details

  • SHA256: 6fa23c28164671245154f076e84ff9cd09b4cf932b309e41403733bc1440b289
  • Pointer size: 130 Bytes
  • Size of remote file: 81.2 kB
v1/assembled_activations/llama32-3b-grpo/plots/rho_histogram_by_layer.png ADDED

Git LFS Details

  • SHA256: f278e30528e6592094a892b88b16e4d1a05426c7a4d6694829eb14d121193fa8
  • Pointer size: 130 Bytes
  • Size of remote file: 55.2 kB
v1/assembled_activations/llama32-3b-grpo/plots/rho_theta_scatter_by_layer.png ADDED

Git LFS Details

  • SHA256: 5ed44496c640f9da695f3945cf65adf07379916dacb1534b9ef92866b40566d6
  • Pointer size: 132 Bytes
  • Size of remote file: 1.16 MB
v1/assembled_activations/llama32-3b-grpo/plots/superposition_by_layer.png ADDED

Git LFS Details

  • SHA256: 76c16daa278a0021d7aa1e9c1dc0c106a443eec54dfed016e3554bdfefb888db
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
v1/assembled_activations/llama32-3b-grpo/plots/theta_by_layer.png ADDED

Git LFS Details

  • SHA256: acc1830012535134a7ba4f26b998825bc03984ccc4b6d479339b36b890d603c3
  • Pointer size: 130 Bytes
  • Size of remote file: 51.6 kB