{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 8.0,
"eval_steps": 500,
"global_step": 920,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008705114254624592,
"grad_norm": 1.703125,
"learning_rate": 5e-05,
"loss": 0.8632,
"step": 1
},
{
"epoch": 0.017410228509249184,
"grad_norm": 2.015625,
"learning_rate": 4.9956521739130436e-05,
"loss": 0.7891,
"step": 2
},
{
"epoch": 0.026115342763873776,
"grad_norm": 8.375,
"learning_rate": 4.9913043478260876e-05,
"loss": 0.7896,
"step": 3
},
{
"epoch": 0.03482045701849837,
"grad_norm": 3.328125,
"learning_rate": 4.986956521739131e-05,
"loss": 0.9271,
"step": 4
},
{
"epoch": 0.04352557127312296,
"grad_norm": 1.640625,
"learning_rate": 4.9826086956521736e-05,
"loss": 0.9154,
"step": 5
},
{
"epoch": 0.05223068552774755,
"grad_norm": 1.5390625,
"learning_rate": 4.9782608695652176e-05,
"loss": 0.9267,
"step": 6
},
{
"epoch": 0.060935799782372145,
"grad_norm": 2.71875,
"learning_rate": 4.973913043478261e-05,
"loss": 0.8189,
"step": 7
},
{
"epoch": 0.06964091403699674,
"grad_norm": 1.5859375,
"learning_rate": 4.969565217391304e-05,
"loss": 0.8018,
"step": 8
},
{
"epoch": 0.07834602829162132,
"grad_norm": 1.359375,
"learning_rate": 4.9652173913043483e-05,
"loss": 0.9439,
"step": 9
},
{
"epoch": 0.08705114254624592,
"grad_norm": 2.375,
"learning_rate": 4.960869565217392e-05,
"loss": 0.8444,
"step": 10
},
{
"epoch": 0.0957562568008705,
"grad_norm": 2.125,
"learning_rate": 4.956521739130435e-05,
"loss": 1.013,
"step": 11
},
{
"epoch": 0.1044613710554951,
"grad_norm": 1.7109375,
"learning_rate": 4.9521739130434784e-05,
"loss": 1.1719,
"step": 12
},
{
"epoch": 0.11316648531011969,
"grad_norm": 1.4296875,
"learning_rate": 4.947826086956522e-05,
"loss": 1.0034,
"step": 13
},
{
"epoch": 0.12187159956474429,
"grad_norm": 1.25,
"learning_rate": 4.943478260869566e-05,
"loss": 1.0015,
"step": 14
},
{
"epoch": 0.1305767138193689,
"grad_norm": 1.25,
"learning_rate": 4.939130434782609e-05,
"loss": 0.9413,
"step": 15
},
{
"epoch": 0.13928182807399347,
"grad_norm": 1.5546875,
"learning_rate": 4.9347826086956524e-05,
"loss": 1.0435,
"step": 16
},
{
"epoch": 0.14798694232861806,
"grad_norm": 2.265625,
"learning_rate": 4.930434782608696e-05,
"loss": 0.9285,
"step": 17
},
{
"epoch": 0.15669205658324264,
"grad_norm": 1.3046875,
"learning_rate": 4.926086956521739e-05,
"loss": 0.91,
"step": 18
},
{
"epoch": 0.16539717083786726,
"grad_norm": 1.296875,
"learning_rate": 4.9217391304347824e-05,
"loss": 0.937,
"step": 19
},
{
"epoch": 0.17410228509249184,
"grad_norm": 1.28125,
"learning_rate": 4.9173913043478265e-05,
"loss": 0.9194,
"step": 20
},
{
"epoch": 0.18280739934711643,
"grad_norm": 1.203125,
"learning_rate": 4.91304347826087e-05,
"loss": 0.9392,
"step": 21
},
{
"epoch": 0.191512513601741,
"grad_norm": 1.3203125,
"learning_rate": 4.908695652173913e-05,
"loss": 0.935,
"step": 22
},
{
"epoch": 0.20021762785636563,
"grad_norm": 1.2421875,
"learning_rate": 4.904347826086957e-05,
"loss": 0.7352,
"step": 23
},
{
"epoch": 0.2089227421109902,
"grad_norm": 1.2578125,
"learning_rate": 4.9e-05,
"loss": 0.8006,
"step": 24
},
{
"epoch": 0.2176278563656148,
"grad_norm": 1.3046875,
"learning_rate": 4.895652173913044e-05,
"loss": 1.1003,
"step": 25
},
{
"epoch": 0.22633297062023938,
"grad_norm": 1.578125,
"learning_rate": 4.891304347826087e-05,
"loss": 0.8786,
"step": 26
},
{
"epoch": 0.235038084874864,
"grad_norm": 1.390625,
"learning_rate": 4.8869565217391305e-05,
"loss": 0.9133,
"step": 27
},
{
"epoch": 0.24374319912948858,
"grad_norm": 1.109375,
"learning_rate": 4.8826086956521746e-05,
"loss": 0.9822,
"step": 28
},
{
"epoch": 0.25244831338411317,
"grad_norm": 1.6640625,
"learning_rate": 4.878260869565218e-05,
"loss": 1.0016,
"step": 29
},
{
"epoch": 0.2611534276387378,
"grad_norm": 1.34375,
"learning_rate": 4.873913043478261e-05,
"loss": 1.1215,
"step": 30
},
{
"epoch": 0.26985854189336234,
"grad_norm": 1.171875,
"learning_rate": 4.8695652173913046e-05,
"loss": 1.1457,
"step": 31
},
{
"epoch": 0.27856365614798695,
"grad_norm": 1.3671875,
"learning_rate": 4.865217391304348e-05,
"loss": 1.0368,
"step": 32
},
{
"epoch": 0.28726877040261156,
"grad_norm": 1.140625,
"learning_rate": 4.860869565217391e-05,
"loss": 0.9539,
"step": 33
},
{
"epoch": 0.2959738846572361,
"grad_norm": 1.4296875,
"learning_rate": 4.856521739130435e-05,
"loss": 0.9165,
"step": 34
},
{
"epoch": 0.30467899891186073,
"grad_norm": 1.2265625,
"learning_rate": 4.8521739130434786e-05,
"loss": 1.03,
"step": 35
},
{
"epoch": 0.3133841131664853,
"grad_norm": 1.5703125,
"learning_rate": 4.847826086956522e-05,
"loss": 1.2848,
"step": 36
},
{
"epoch": 0.3220892274211099,
"grad_norm": 1.21875,
"learning_rate": 4.843478260869565e-05,
"loss": 0.8663,
"step": 37
},
{
"epoch": 0.3307943416757345,
"grad_norm": 1.125,
"learning_rate": 4.839130434782609e-05,
"loss": 0.9547,
"step": 38
},
{
"epoch": 0.3394994559303591,
"grad_norm": 1.328125,
"learning_rate": 4.834782608695652e-05,
"loss": 0.872,
"step": 39
},
{
"epoch": 0.3482045701849837,
"grad_norm": 1.140625,
"learning_rate": 4.830434782608696e-05,
"loss": 0.8424,
"step": 40
},
{
"epoch": 0.35690968443960824,
"grad_norm": 1.2578125,
"learning_rate": 4.8260869565217394e-05,
"loss": 0.8246,
"step": 41
},
{
"epoch": 0.36561479869423286,
"grad_norm": 1.3046875,
"learning_rate": 4.8217391304347834e-05,
"loss": 0.8326,
"step": 42
},
{
"epoch": 0.37431991294885747,
"grad_norm": 1.2109375,
"learning_rate": 4.817391304347826e-05,
"loss": 0.9043,
"step": 43
},
{
"epoch": 0.383025027203482,
"grad_norm": 1.296875,
"learning_rate": 4.8130434782608694e-05,
"loss": 0.8888,
"step": 44
},
{
"epoch": 0.39173014145810664,
"grad_norm": 1.2265625,
"learning_rate": 4.8086956521739134e-05,
"loss": 0.778,
"step": 45
},
{
"epoch": 0.40043525571273125,
"grad_norm": 1.0546875,
"learning_rate": 4.804347826086957e-05,
"loss": 0.8684,
"step": 46
},
{
"epoch": 0.4091403699673558,
"grad_norm": 1.5078125,
"learning_rate": 4.8e-05,
"loss": 0.9351,
"step": 47
},
{
"epoch": 0.4178454842219804,
"grad_norm": 1.2421875,
"learning_rate": 4.795652173913044e-05,
"loss": 0.8708,
"step": 48
},
{
"epoch": 0.426550598476605,
"grad_norm": 1.28125,
"learning_rate": 4.7913043478260875e-05,
"loss": 0.9919,
"step": 49
},
{
"epoch": 0.4352557127312296,
"grad_norm": 1.1953125,
"learning_rate": 4.78695652173913e-05,
"loss": 0.8166,
"step": 50
},
{
"epoch": 0.4439608269858542,
"grad_norm": 1.3203125,
"learning_rate": 4.782608695652174e-05,
"loss": 0.9542,
"step": 51
},
{
"epoch": 0.45266594124047876,
"grad_norm": 1.2109375,
"learning_rate": 4.7782608695652175e-05,
"loss": 1.0959,
"step": 52
},
{
"epoch": 0.4613710554951034,
"grad_norm": 1.140625,
"learning_rate": 4.773913043478261e-05,
"loss": 0.8695,
"step": 53
},
{
"epoch": 0.470076169749728,
"grad_norm": 1.5234375,
"learning_rate": 4.769565217391305e-05,
"loss": 1.1411,
"step": 54
},
{
"epoch": 0.47878128400435255,
"grad_norm": 1.125,
"learning_rate": 4.765217391304348e-05,
"loss": 0.8354,
"step": 55
},
{
"epoch": 0.48748639825897716,
"grad_norm": 1.390625,
"learning_rate": 4.7608695652173916e-05,
"loss": 0.9133,
"step": 56
},
{
"epoch": 0.4961915125136017,
"grad_norm": 1.2578125,
"learning_rate": 4.756521739130435e-05,
"loss": 0.9042,
"step": 57
},
{
"epoch": 0.5048966267682263,
"grad_norm": 1.28125,
"learning_rate": 4.752173913043478e-05,
"loss": 0.9707,
"step": 58
},
{
"epoch": 0.5136017410228509,
"grad_norm": 1.2734375,
"learning_rate": 4.747826086956522e-05,
"loss": 0.7845,
"step": 59
},
{
"epoch": 0.5223068552774756,
"grad_norm": 1.3046875,
"learning_rate": 4.7434782608695656e-05,
"loss": 0.955,
"step": 60
},
{
"epoch": 0.5310119695321001,
"grad_norm": 1.546875,
"learning_rate": 4.739130434782609e-05,
"loss": 0.9521,
"step": 61
},
{
"epoch": 0.5397170837867247,
"grad_norm": 1.2578125,
"learning_rate": 4.734782608695652e-05,
"loss": 1.0155,
"step": 62
},
{
"epoch": 0.5484221980413493,
"grad_norm": 1.3125,
"learning_rate": 4.7304347826086956e-05,
"loss": 1.0093,
"step": 63
},
{
"epoch": 0.5571273122959739,
"grad_norm": 1.234375,
"learning_rate": 4.726086956521739e-05,
"loss": 0.8372,
"step": 64
},
{
"epoch": 0.5658324265505985,
"grad_norm": 1.0703125,
"learning_rate": 4.721739130434783e-05,
"loss": 0.8776,
"step": 65
},
{
"epoch": 0.5745375408052231,
"grad_norm": 1.2265625,
"learning_rate": 4.7173913043478264e-05,
"loss": 0.8854,
"step": 66
},
{
"epoch": 0.5832426550598476,
"grad_norm": 1.1171875,
"learning_rate": 4.71304347826087e-05,
"loss": 0.8966,
"step": 67
},
{
"epoch": 0.5919477693144722,
"grad_norm": 1.296875,
"learning_rate": 4.708695652173914e-05,
"loss": 0.9694,
"step": 68
},
{
"epoch": 0.6006528835690969,
"grad_norm": 1.1875,
"learning_rate": 4.7043478260869564e-05,
"loss": 1.062,
"step": 69
},
{
"epoch": 0.6093579978237215,
"grad_norm": 1.078125,
"learning_rate": 4.7e-05,
"loss": 0.8369,
"step": 70
},
{
"epoch": 0.6180631120783461,
"grad_norm": 1.1953125,
"learning_rate": 4.695652173913044e-05,
"loss": 0.7927,
"step": 71
},
{
"epoch": 0.6267682263329706,
"grad_norm": 1.4140625,
"learning_rate": 4.691304347826087e-05,
"loss": 1.3626,
"step": 72
},
{
"epoch": 0.6354733405875952,
"grad_norm": 1.1328125,
"learning_rate": 4.686956521739131e-05,
"loss": 0.7737,
"step": 73
},
{
"epoch": 0.6441784548422198,
"grad_norm": 1.0859375,
"learning_rate": 4.6826086956521745e-05,
"loss": 0.9044,
"step": 74
},
{
"epoch": 0.6528835690968444,
"grad_norm": 1.078125,
"learning_rate": 4.678260869565218e-05,
"loss": 0.9451,
"step": 75
},
{
"epoch": 0.661588683351469,
"grad_norm": 1.234375,
"learning_rate": 4.673913043478261e-05,
"loss": 1.0073,
"step": 76
},
{
"epoch": 0.6702937976060935,
"grad_norm": 1.1484375,
"learning_rate": 4.6695652173913045e-05,
"loss": 1.0206,
"step": 77
},
{
"epoch": 0.6789989118607181,
"grad_norm": 1.25,
"learning_rate": 4.665217391304348e-05,
"loss": 0.7854,
"step": 78
},
{
"epoch": 0.6877040261153428,
"grad_norm": 1.0859375,
"learning_rate": 4.660869565217392e-05,
"loss": 0.9661,
"step": 79
},
{
"epoch": 0.6964091403699674,
"grad_norm": 1.296875,
"learning_rate": 4.656521739130435e-05,
"loss": 1.0657,
"step": 80
},
{
"epoch": 0.705114254624592,
"grad_norm": 1.0390625,
"learning_rate": 4.6521739130434785e-05,
"loss": 0.6992,
"step": 81
},
{
"epoch": 0.7138193688792165,
"grad_norm": 1.0703125,
"learning_rate": 4.647826086956522e-05,
"loss": 0.8135,
"step": 82
},
{
"epoch": 0.7225244831338411,
"grad_norm": 1.359375,
"learning_rate": 4.643478260869565e-05,
"loss": 1.1033,
"step": 83
},
{
"epoch": 0.7312295973884657,
"grad_norm": 1.3828125,
"learning_rate": 4.6391304347826086e-05,
"loss": 0.8077,
"step": 84
},
{
"epoch": 0.7399347116430903,
"grad_norm": 1.0859375,
"learning_rate": 4.6347826086956526e-05,
"loss": 0.8582,
"step": 85
},
{
"epoch": 0.7486398258977149,
"grad_norm": 1.140625,
"learning_rate": 4.630434782608696e-05,
"loss": 1.0223,
"step": 86
},
{
"epoch": 0.7573449401523396,
"grad_norm": 1.2734375,
"learning_rate": 4.62608695652174e-05,
"loss": 0.7866,
"step": 87
},
{
"epoch": 0.766050054406964,
"grad_norm": 1.1484375,
"learning_rate": 4.6217391304347826e-05,
"loss": 0.804,
"step": 88
},
{
"epoch": 0.7747551686615887,
"grad_norm": 0.96484375,
"learning_rate": 4.617391304347826e-05,
"loss": 0.681,
"step": 89
},
{
"epoch": 0.7834602829162133,
"grad_norm": 1.0078125,
"learning_rate": 4.61304347826087e-05,
"loss": 0.9594,
"step": 90
},
{
"epoch": 0.7921653971708379,
"grad_norm": 1.0546875,
"learning_rate": 4.608695652173913e-05,
"loss": 0.9842,
"step": 91
},
{
"epoch": 0.8008705114254625,
"grad_norm": 1.09375,
"learning_rate": 4.6043478260869567e-05,
"loss": 0.9142,
"step": 92
},
{
"epoch": 0.809575625680087,
"grad_norm": 1.15625,
"learning_rate": 4.600000000000001e-05,
"loss": 0.7379,
"step": 93
},
{
"epoch": 0.8182807399347116,
"grad_norm": 1.3984375,
"learning_rate": 4.595652173913044e-05,
"loss": 1.0656,
"step": 94
},
{
"epoch": 0.8269858541893362,
"grad_norm": 1.15625,
"learning_rate": 4.591304347826087e-05,
"loss": 0.9404,
"step": 95
},
{
"epoch": 0.8356909684439608,
"grad_norm": 1.109375,
"learning_rate": 4.586956521739131e-05,
"loss": 1.0705,
"step": 96
},
{
"epoch": 0.8443960826985855,
"grad_norm": 1.1171875,
"learning_rate": 4.582608695652174e-05,
"loss": 1.1848,
"step": 97
},
{
"epoch": 0.85310119695321,
"grad_norm": 1.15625,
"learning_rate": 4.5782608695652174e-05,
"loss": 0.8771,
"step": 98
},
{
"epoch": 0.8618063112078346,
"grad_norm": 1.265625,
"learning_rate": 4.5739130434782614e-05,
"loss": 0.9586,
"step": 99
},
{
"epoch": 0.8705114254624592,
"grad_norm": 1.1015625,
"learning_rate": 4.569565217391305e-05,
"loss": 0.7516,
"step": 100
},
{
"epoch": 0.8792165397170838,
"grad_norm": 1.3125,
"learning_rate": 4.565217391304348e-05,
"loss": 0.8131,
"step": 101
},
{
"epoch": 0.8879216539717084,
"grad_norm": 1.109375,
"learning_rate": 4.5608695652173914e-05,
"loss": 0.9118,
"step": 102
},
{
"epoch": 0.8966267682263329,
"grad_norm": 1.4140625,
"learning_rate": 4.556521739130435e-05,
"loss": 0.8855,
"step": 103
},
{
"epoch": 0.9053318824809575,
"grad_norm": 1.1796875,
"learning_rate": 4.552173913043479e-05,
"loss": 0.7721,
"step": 104
},
{
"epoch": 0.9140369967355821,
"grad_norm": 1.2265625,
"learning_rate": 4.547826086956522e-05,
"loss": 0.7768,
"step": 105
},
{
"epoch": 0.9227421109902068,
"grad_norm": 1.0703125,
"learning_rate": 4.5434782608695655e-05,
"loss": 0.9557,
"step": 106
},
{
"epoch": 0.9314472252448314,
"grad_norm": 1.0625,
"learning_rate": 4.539130434782609e-05,
"loss": 0.8868,
"step": 107
},
{
"epoch": 0.940152339499456,
"grad_norm": 1.15625,
"learning_rate": 4.534782608695652e-05,
"loss": 0.8889,
"step": 108
},
{
"epoch": 0.9488574537540805,
"grad_norm": 1.15625,
"learning_rate": 4.5304347826086955e-05,
"loss": 1.1122,
"step": 109
},
{
"epoch": 0.9575625680087051,
"grad_norm": 1.1875,
"learning_rate": 4.5260869565217395e-05,
"loss": 0.7954,
"step": 110
},
{
"epoch": 0.9662676822633297,
"grad_norm": 1.1796875,
"learning_rate": 4.521739130434783e-05,
"loss": 0.8551,
"step": 111
},
{
"epoch": 0.9749727965179543,
"grad_norm": 1.1953125,
"learning_rate": 4.517391304347826e-05,
"loss": 1.0285,
"step": 112
},
{
"epoch": 0.9836779107725789,
"grad_norm": 1.25,
"learning_rate": 4.51304347826087e-05,
"loss": 1.0737,
"step": 113
},
{
"epoch": 0.9923830250272034,
"grad_norm": 1.015625,
"learning_rate": 4.508695652173913e-05,
"loss": 0.8448,
"step": 114
},
{
"epoch": 1.0,
"grad_norm": 1.2421875,
"learning_rate": 4.504347826086956e-05,
"loss": 0.9171,
"step": 115
},
{
"epoch": 1.0087051142546246,
"grad_norm": 1.234375,
"learning_rate": 4.5e-05,
"loss": 0.7003,
"step": 116
},
{
"epoch": 1.0174102285092492,
"grad_norm": 1.5078125,
"learning_rate": 4.4956521739130436e-05,
"loss": 0.7938,
"step": 117
},
{
"epoch": 1.0261153427638738,
"grad_norm": 1.09375,
"learning_rate": 4.4913043478260876e-05,
"loss": 0.7323,
"step": 118
},
{
"epoch": 1.0348204570184985,
"grad_norm": 1.2421875,
"learning_rate": 4.486956521739131e-05,
"loss": 0.5957,
"step": 119
},
{
"epoch": 1.043525571273123,
"grad_norm": 1.453125,
"learning_rate": 4.482608695652174e-05,
"loss": 0.7519,
"step": 120
},
{
"epoch": 1.0522306855277475,
"grad_norm": 1.4453125,
"learning_rate": 4.478260869565218e-05,
"loss": 0.7003,
"step": 121
},
{
"epoch": 1.060935799782372,
"grad_norm": 1.4296875,
"learning_rate": 4.473913043478261e-05,
"loss": 0.8537,
"step": 122
},
{
"epoch": 1.0696409140369967,
"grad_norm": 1.2265625,
"learning_rate": 4.4695652173913044e-05,
"loss": 0.7305,
"step": 123
},
{
"epoch": 1.0783460282916213,
"grad_norm": 1.3203125,
"learning_rate": 4.4652173913043484e-05,
"loss": 0.6122,
"step": 124
},
{
"epoch": 1.087051142546246,
"grad_norm": 1.296875,
"learning_rate": 4.460869565217392e-05,
"loss": 0.9224,
"step": 125
},
{
"epoch": 1.0957562568008705,
"grad_norm": 0.94921875,
"learning_rate": 4.456521739130435e-05,
"loss": 0.6261,
"step": 126
},
{
"epoch": 1.1044613710554951,
"grad_norm": 1.125,
"learning_rate": 4.4521739130434784e-05,
"loss": 0.7336,
"step": 127
},
{
"epoch": 1.1131664853101197,
"grad_norm": 1.125,
"learning_rate": 4.447826086956522e-05,
"loss": 0.6978,
"step": 128
},
{
"epoch": 1.1218715995647444,
"grad_norm": 1.125,
"learning_rate": 4.443478260869565e-05,
"loss": 0.6352,
"step": 129
},
{
"epoch": 1.130576713819369,
"grad_norm": 1.125,
"learning_rate": 4.439130434782609e-05,
"loss": 0.4918,
"step": 130
},
{
"epoch": 1.1392818280739934,
"grad_norm": 1.25,
"learning_rate": 4.4347826086956525e-05,
"loss": 0.9074,
"step": 131
},
{
"epoch": 1.147986942328618,
"grad_norm": 1.296875,
"learning_rate": 4.430434782608696e-05,
"loss": 0.7653,
"step": 132
},
{
"epoch": 1.1566920565832426,
"grad_norm": 1.046875,
"learning_rate": 4.426086956521739e-05,
"loss": 0.6195,
"step": 133
},
{
"epoch": 1.1653971708378672,
"grad_norm": 1.1953125,
"learning_rate": 4.4217391304347825e-05,
"loss": 0.5887,
"step": 134
},
{
"epoch": 1.1741022850924918,
"grad_norm": 2.15625,
"learning_rate": 4.4173913043478265e-05,
"loss": 0.7727,
"step": 135
},
{
"epoch": 1.1828073993471164,
"grad_norm": 1.0703125,
"learning_rate": 4.41304347826087e-05,
"loss": 0.598,
"step": 136
},
{
"epoch": 1.191512513601741,
"grad_norm": 1.2265625,
"learning_rate": 4.408695652173913e-05,
"loss": 0.7572,
"step": 137
},
{
"epoch": 1.2002176278563657,
"grad_norm": 1.2265625,
"learning_rate": 4.404347826086957e-05,
"loss": 0.6791,
"step": 138
},
{
"epoch": 1.2089227421109903,
"grad_norm": 1.125,
"learning_rate": 4.4000000000000006e-05,
"loss": 0.7195,
"step": 139
},
{
"epoch": 1.2176278563656149,
"grad_norm": 0.984375,
"learning_rate": 4.395652173913043e-05,
"loss": 0.5021,
"step": 140
},
{
"epoch": 1.2263329706202395,
"grad_norm": 1.2578125,
"learning_rate": 4.391304347826087e-05,
"loss": 0.6567,
"step": 141
},
{
"epoch": 1.2350380848748639,
"grad_norm": 1.1875,
"learning_rate": 4.3869565217391306e-05,
"loss": 0.8551,
"step": 142
},
{
"epoch": 1.2437431991294885,
"grad_norm": 1.046875,
"learning_rate": 4.382608695652174e-05,
"loss": 0.7298,
"step": 143
},
{
"epoch": 1.252448313384113,
"grad_norm": 1.3203125,
"learning_rate": 4.378260869565218e-05,
"loss": 0.9639,
"step": 144
},
{
"epoch": 1.2611534276387377,
"grad_norm": 1.0546875,
"learning_rate": 4.373913043478261e-05,
"loss": 0.7321,
"step": 145
},
{
"epoch": 1.2698585418933623,
"grad_norm": 0.9921875,
"learning_rate": 4.3695652173913046e-05,
"loss": 0.6488,
"step": 146
},
{
"epoch": 1.278563656147987,
"grad_norm": 1.0703125,
"learning_rate": 4.365217391304348e-05,
"loss": 0.7783,
"step": 147
},
{
"epoch": 1.2872687704026116,
"grad_norm": 1.1484375,
"learning_rate": 4.360869565217391e-05,
"loss": 0.6042,
"step": 148
},
{
"epoch": 1.2959738846572362,
"grad_norm": 1.09375,
"learning_rate": 4.3565217391304353e-05,
"loss": 0.7951,
"step": 149
},
{
"epoch": 1.3046789989118608,
"grad_norm": 1.09375,
"learning_rate": 4.352173913043479e-05,
"loss": 0.6017,
"step": 150
},
{
"epoch": 1.3133841131664852,
"grad_norm": 1.2109375,
"learning_rate": 4.347826086956522e-05,
"loss": 0.6653,
"step": 151
},
{
"epoch": 1.32208922742111,
"grad_norm": 1.3828125,
"learning_rate": 4.3434782608695654e-05,
"loss": 0.9719,
"step": 152
},
{
"epoch": 1.3307943416757344,
"grad_norm": 1.015625,
"learning_rate": 4.339130434782609e-05,
"loss": 0.6013,
"step": 153
},
{
"epoch": 1.339499455930359,
"grad_norm": 1.1015625,
"learning_rate": 4.334782608695652e-05,
"loss": 0.6483,
"step": 154
},
{
"epoch": 1.3482045701849836,
"grad_norm": 1.0859375,
"learning_rate": 4.330434782608696e-05,
"loss": 0.6805,
"step": 155
},
{
"epoch": 1.3569096844396082,
"grad_norm": 1.140625,
"learning_rate": 4.3260869565217394e-05,
"loss": 0.7598,
"step": 156
},
{
"epoch": 1.3656147986942329,
"grad_norm": 1.046875,
"learning_rate": 4.321739130434783e-05,
"loss": 0.569,
"step": 157
},
{
"epoch": 1.3743199129488575,
"grad_norm": 1.0625,
"learning_rate": 4.317391304347827e-05,
"loss": 0.7223,
"step": 158
},
{
"epoch": 1.383025027203482,
"grad_norm": 1.1328125,
"learning_rate": 4.3130434782608695e-05,
"loss": 0.7309,
"step": 159
},
{
"epoch": 1.3917301414581067,
"grad_norm": 1.21875,
"learning_rate": 4.308695652173913e-05,
"loss": 0.9004,
"step": 160
},
{
"epoch": 1.4004352557127313,
"grad_norm": 1.125,
"learning_rate": 4.304347826086957e-05,
"loss": 0.6035,
"step": 161
},
{
"epoch": 1.4091403699673557,
"grad_norm": 1.2578125,
"learning_rate": 4.3e-05,
"loss": 0.957,
"step": 162
},
{
"epoch": 1.4178454842219805,
"grad_norm": 2.6875,
"learning_rate": 4.2956521739130435e-05,
"loss": 0.5841,
"step": 163
},
{
"epoch": 1.426550598476605,
"grad_norm": 1.1640625,
"learning_rate": 4.2913043478260875e-05,
"loss": 0.8018,
"step": 164
},
{
"epoch": 1.4352557127312295,
"grad_norm": 1.078125,
"learning_rate": 4.286956521739131e-05,
"loss": 0.6603,
"step": 165
},
{
"epoch": 1.4439608269858542,
"grad_norm": 1.40625,
"learning_rate": 4.282608695652174e-05,
"loss": 0.7107,
"step": 166
},
{
"epoch": 1.4526659412404788,
"grad_norm": 1.0390625,
"learning_rate": 4.2782608695652176e-05,
"loss": 0.7577,
"step": 167
},
{
"epoch": 1.4613710554951034,
"grad_norm": 1.0859375,
"learning_rate": 4.273913043478261e-05,
"loss": 0.7573,
"step": 168
},
{
"epoch": 1.470076169749728,
"grad_norm": 1.265625,
"learning_rate": 4.269565217391305e-05,
"loss": 0.7008,
"step": 169
},
{
"epoch": 1.4787812840043526,
"grad_norm": 1.1953125,
"learning_rate": 4.265217391304348e-05,
"loss": 0.6966,
"step": 170
},
{
"epoch": 1.4874863982589772,
"grad_norm": 1.1015625,
"learning_rate": 4.2608695652173916e-05,
"loss": 0.6855,
"step": 171
},
{
"epoch": 1.4961915125136018,
"grad_norm": 1.109375,
"learning_rate": 4.256521739130435e-05,
"loss": 0.67,
"step": 172
},
{
"epoch": 1.5048966267682262,
"grad_norm": 1.15625,
"learning_rate": 4.252173913043478e-05,
"loss": 0.8798,
"step": 173
},
{
"epoch": 1.513601741022851,
"grad_norm": 1.15625,
"learning_rate": 4.2478260869565216e-05,
"loss": 0.7921,
"step": 174
},
{
"epoch": 1.5223068552774754,
"grad_norm": 1.1171875,
"learning_rate": 4.2434782608695657e-05,
"loss": 0.6351,
"step": 175
},
{
"epoch": 1.5310119695321,
"grad_norm": 1.609375,
"learning_rate": 4.239130434782609e-05,
"loss": 0.6681,
"step": 176
},
{
"epoch": 1.5397170837867247,
"grad_norm": 1.265625,
"learning_rate": 4.2347826086956523e-05,
"loss": 0.7897,
"step": 177
},
{
"epoch": 1.5484221980413493,
"grad_norm": 1.2734375,
"learning_rate": 4.230434782608696e-05,
"loss": 0.772,
"step": 178
},
{
"epoch": 1.557127312295974,
"grad_norm": 1.0546875,
"learning_rate": 4.226086956521739e-05,
"loss": 0.5812,
"step": 179
},
{
"epoch": 1.5658324265505985,
"grad_norm": 1.0703125,
"learning_rate": 4.221739130434783e-05,
"loss": 0.5453,
"step": 180
},
{
"epoch": 1.5745375408052231,
"grad_norm": 1.0078125,
"learning_rate": 4.2173913043478264e-05,
"loss": 0.6831,
"step": 181
},
{
"epoch": 1.5832426550598475,
"grad_norm": 1.0625,
"learning_rate": 4.21304347826087e-05,
"loss": 0.767,
"step": 182
},
{
"epoch": 1.5919477693144723,
"grad_norm": 1.1484375,
"learning_rate": 4.208695652173914e-05,
"loss": 0.6145,
"step": 183
},
{
"epoch": 1.6006528835690967,
"grad_norm": 1.1015625,
"learning_rate": 4.204347826086957e-05,
"loss": 0.8171,
"step": 184
},
{
"epoch": 1.6093579978237216,
"grad_norm": 1.4609375,
"learning_rate": 4.2e-05,
"loss": 0.6039,
"step": 185
},
{
"epoch": 1.618063112078346,
"grad_norm": 1.1171875,
"learning_rate": 4.195652173913044e-05,
"loss": 0.7126,
"step": 186
},
{
"epoch": 1.6267682263329706,
"grad_norm": 1.1328125,
"learning_rate": 4.191304347826087e-05,
"loss": 0.5621,
"step": 187
},
{
"epoch": 1.6354733405875952,
"grad_norm": 1.0703125,
"learning_rate": 4.1869565217391305e-05,
"loss": 0.7605,
"step": 188
},
{
"epoch": 1.6441784548422198,
"grad_norm": 1.0625,
"learning_rate": 4.1826086956521745e-05,
"loss": 0.8704,
"step": 189
},
{
"epoch": 1.6528835690968444,
"grad_norm": 1.359375,
"learning_rate": 4.178260869565218e-05,
"loss": 0.7108,
"step": 190
},
{
"epoch": 1.661588683351469,
"grad_norm": 1.0859375,
"learning_rate": 4.1739130434782605e-05,
"loss": 0.67,
"step": 191
},
{
"epoch": 1.6702937976060936,
"grad_norm": 1.1171875,
"learning_rate": 4.1695652173913045e-05,
"loss": 0.9325,
"step": 192
},
{
"epoch": 1.678998911860718,
"grad_norm": 1.2578125,
"learning_rate": 4.165217391304348e-05,
"loss": 0.8226,
"step": 193
},
{
"epoch": 1.6877040261153429,
"grad_norm": 1.03125,
"learning_rate": 4.160869565217391e-05,
"loss": 0.8652,
"step": 194
},
{
"epoch": 1.6964091403699673,
"grad_norm": 1.171875,
"learning_rate": 4.156521739130435e-05,
"loss": 0.6873,
"step": 195
},
{
"epoch": 1.705114254624592,
"grad_norm": 1.1953125,
"learning_rate": 4.1521739130434786e-05,
"loss": 0.7681,
"step": 196
},
{
"epoch": 1.7138193688792165,
"grad_norm": 1.0625,
"learning_rate": 4.147826086956522e-05,
"loss": 0.6852,
"step": 197
},
{
"epoch": 1.722524483133841,
"grad_norm": 1.0546875,
"learning_rate": 4.143478260869565e-05,
"loss": 0.8035,
"step": 198
},
{
"epoch": 1.7312295973884657,
"grad_norm": 1.140625,
"learning_rate": 4.1391304347826086e-05,
"loss": 0.6638,
"step": 199
},
{
"epoch": 1.7399347116430903,
"grad_norm": 1.2265625,
"learning_rate": 4.1347826086956526e-05,
"loss": 0.8636,
"step": 200
},
{
"epoch": 1.748639825897715,
"grad_norm": 1.140625,
"learning_rate": 4.130434782608696e-05,
"loss": 0.9844,
"step": 201
},
{
"epoch": 1.7573449401523396,
"grad_norm": 1.1796875,
"learning_rate": 4.126086956521739e-05,
"loss": 0.7461,
"step": 202
},
{
"epoch": 1.7660500544069642,
"grad_norm": 1.1484375,
"learning_rate": 4.1217391304347827e-05,
"loss": 0.677,
"step": 203
},
{
"epoch": 1.7747551686615886,
"grad_norm": 1.234375,
"learning_rate": 4.117391304347826e-05,
"loss": 0.5882,
"step": 204
},
{
"epoch": 1.7834602829162134,
"grad_norm": 1.2109375,
"learning_rate": 4.1130434782608693e-05,
"loss": 0.7051,
"step": 205
},
{
"epoch": 1.7921653971708378,
"grad_norm": 1.2109375,
"learning_rate": 4.1086956521739134e-05,
"loss": 0.7658,
"step": 206
},
{
"epoch": 1.8008705114254626,
"grad_norm": 1.328125,
"learning_rate": 4.104347826086957e-05,
"loss": 0.8158,
"step": 207
},
{
"epoch": 1.809575625680087,
"grad_norm": 1.4765625,
"learning_rate": 4.1e-05,
"loss": 0.7081,
"step": 208
},
{
"epoch": 1.8182807399347116,
"grad_norm": 1.046875,
"learning_rate": 4.095652173913044e-05,
"loss": 0.648,
"step": 209
},
{
"epoch": 1.8269858541893362,
"grad_norm": 1.2734375,
"learning_rate": 4.091304347826087e-05,
"loss": 0.7721,
"step": 210
},
{
"epoch": 1.8356909684439608,
"grad_norm": 1.03125,
"learning_rate": 4.086956521739131e-05,
"loss": 0.5612,
"step": 211
},
{
"epoch": 1.8443960826985855,
"grad_norm": 1.1015625,
"learning_rate": 4.082608695652174e-05,
"loss": 0.635,
"step": 212
},
{
"epoch": 1.8531011969532099,
"grad_norm": 1.1015625,
"learning_rate": 4.0782608695652174e-05,
"loss": 0.7699,
"step": 213
},
{
"epoch": 1.8618063112078347,
"grad_norm": 1.234375,
"learning_rate": 4.0739130434782615e-05,
"loss": 0.7569,
"step": 214
},
{
"epoch": 1.870511425462459,
"grad_norm": 1.203125,
"learning_rate": 4.069565217391305e-05,
"loss": 0.7783,
"step": 215
},
{
"epoch": 1.879216539717084,
"grad_norm": 1.203125,
"learning_rate": 4.065217391304348e-05,
"loss": 0.7102,
"step": 216
},
{
"epoch": 1.8879216539717083,
"grad_norm": 2.53125,
"learning_rate": 4.0608695652173915e-05,
"loss": 1.0202,
"step": 217
},
{
"epoch": 1.896626768226333,
"grad_norm": 1.2109375,
"learning_rate": 4.056521739130435e-05,
"loss": 0.682,
"step": 218
},
{
"epoch": 1.9053318824809575,
"grad_norm": 1.03125,
"learning_rate": 4.052173913043478e-05,
"loss": 0.648,
"step": 219
},
{
"epoch": 1.9140369967355821,
"grad_norm": 1.234375,
"learning_rate": 4.047826086956522e-05,
"loss": 0.8244,
"step": 220
},
{
"epoch": 1.9227421109902068,
"grad_norm": 1.078125,
"learning_rate": 4.0434782608695655e-05,
"loss": 0.6392,
"step": 221
},
{
"epoch": 1.9314472252448314,
"grad_norm": 1.09375,
"learning_rate": 4.039130434782609e-05,
"loss": 0.7142,
"step": 222
},
{
"epoch": 1.940152339499456,
"grad_norm": 1.1328125,
"learning_rate": 4.034782608695652e-05,
"loss": 0.637,
"step": 223
},
{
"epoch": 1.9488574537540804,
"grad_norm": 1.03125,
"learning_rate": 4.0304347826086956e-05,
"loss": 0.7092,
"step": 224
},
{
"epoch": 1.9575625680087052,
"grad_norm": 1.0390625,
"learning_rate": 4.026086956521739e-05,
"loss": 0.6927,
"step": 225
},
{
"epoch": 1.9662676822633296,
"grad_norm": 1.125,
"learning_rate": 4.021739130434783e-05,
"loss": 0.744,
"step": 226
},
{
"epoch": 1.9749727965179544,
"grad_norm": 1.1484375,
"learning_rate": 4.017391304347826e-05,
"loss": 0.6604,
"step": 227
},
{
"epoch": 1.9836779107725788,
"grad_norm": 1.0625,
"learning_rate": 4.01304347826087e-05,
"loss": 0.7406,
"step": 228
},
{
"epoch": 1.9923830250272034,
"grad_norm": 1.078125,
"learning_rate": 4.008695652173913e-05,
"loss": 0.645,
"step": 229
},
{
"epoch": 2.0,
"grad_norm": 1.4765625,
"learning_rate": 4.004347826086956e-05,
"loss": 0.7339,
"step": 230
},
{
"epoch": 2.0087051142546244,
"grad_norm": 1.2578125,
"learning_rate": 4e-05,
"loss": 0.6646,
"step": 231
},
{
"epoch": 2.0174102285092492,
"grad_norm": 1.21875,
"learning_rate": 3.995652173913044e-05,
"loss": 0.4969,
"step": 232
},
{
"epoch": 2.0261153427638736,
"grad_norm": 1.1953125,
"learning_rate": 3.991304347826087e-05,
"loss": 0.5202,
"step": 233
},
{
"epoch": 2.0348204570184985,
"grad_norm": 0.97265625,
"learning_rate": 3.986956521739131e-05,
"loss": 0.671,
"step": 234
},
{
"epoch": 2.043525571273123,
"grad_norm": 1.1875,
"learning_rate": 3.9826086956521744e-05,
"loss": 0.4306,
"step": 235
},
{
"epoch": 2.0522306855277477,
"grad_norm": 1.75,
"learning_rate": 3.978260869565217e-05,
"loss": 0.5232,
"step": 236
},
{
"epoch": 2.060935799782372,
"grad_norm": 1.6953125,
"learning_rate": 3.973913043478261e-05,
"loss": 0.498,
"step": 237
},
{
"epoch": 2.069640914036997,
"grad_norm": 1.6484375,
"learning_rate": 3.9695652173913044e-05,
"loss": 0.6858,
"step": 238
},
{
"epoch": 2.0783460282916213,
"grad_norm": 1.5703125,
"learning_rate": 3.965217391304348e-05,
"loss": 0.525,
"step": 239
},
{
"epoch": 2.087051142546246,
"grad_norm": 1.203125,
"learning_rate": 3.960869565217392e-05,
"loss": 0.4226,
"step": 240
},
{
"epoch": 2.0957562568008705,
"grad_norm": 1.1953125,
"learning_rate": 3.956521739130435e-05,
"loss": 0.5474,
"step": 241
},
{
"epoch": 2.104461371055495,
"grad_norm": 1.109375,
"learning_rate": 3.9521739130434785e-05,
"loss": 0.4487,
"step": 242
},
{
"epoch": 2.1131664853101197,
"grad_norm": 1.125,
"learning_rate": 3.947826086956522e-05,
"loss": 0.404,
"step": 243
},
{
"epoch": 2.121871599564744,
"grad_norm": 1.1796875,
"learning_rate": 3.943478260869565e-05,
"loss": 0.4901,
"step": 244
},
{
"epoch": 2.130576713819369,
"grad_norm": 1.1875,
"learning_rate": 3.939130434782609e-05,
"loss": 0.417,
"step": 245
},
{
"epoch": 2.1392818280739934,
"grad_norm": 1.1171875,
"learning_rate": 3.9347826086956525e-05,
"loss": 0.7131,
"step": 246
},
{
"epoch": 2.147986942328618,
"grad_norm": 1.296875,
"learning_rate": 3.930434782608696e-05,
"loss": 0.5996,
"step": 247
},
{
"epoch": 2.1566920565832426,
"grad_norm": 1.171875,
"learning_rate": 3.926086956521739e-05,
"loss": 0.6252,
"step": 248
},
{
"epoch": 2.1653971708378674,
"grad_norm": 1.28125,
"learning_rate": 3.9217391304347825e-05,
"loss": 0.4977,
"step": 249
},
{
"epoch": 2.174102285092492,
"grad_norm": 1.1640625,
"learning_rate": 3.917391304347826e-05,
"loss": 0.7226,
"step": 250
},
{
"epoch": 2.1828073993471167,
"grad_norm": 1.40625,
"learning_rate": 3.91304347826087e-05,
"loss": 0.7221,
"step": 251
},
{
"epoch": 2.191512513601741,
"grad_norm": 1.171875,
"learning_rate": 3.908695652173913e-05,
"loss": 0.6118,
"step": 252
},
{
"epoch": 2.2002176278563654,
"grad_norm": 1.109375,
"learning_rate": 3.9043478260869566e-05,
"loss": 0.8739,
"step": 253
},
{
"epoch": 2.2089227421109903,
"grad_norm": 1.2734375,
"learning_rate": 3.9000000000000006e-05,
"loss": 0.5692,
"step": 254
},
{
"epoch": 2.2176278563656147,
"grad_norm": 1.2890625,
"learning_rate": 3.895652173913043e-05,
"loss": 0.527,
"step": 255
},
{
"epoch": 2.2263329706202395,
"grad_norm": 1.1015625,
"learning_rate": 3.8913043478260866e-05,
"loss": 0.4093,
"step": 256
},
{
"epoch": 2.235038084874864,
"grad_norm": 1.3828125,
"learning_rate": 3.8869565217391306e-05,
"loss": 0.57,
"step": 257
},
{
"epoch": 2.2437431991294887,
"grad_norm": 1.15625,
"learning_rate": 3.882608695652174e-05,
"loss": 0.4616,
"step": 258
},
{
"epoch": 2.252448313384113,
"grad_norm": 1.2265625,
"learning_rate": 3.878260869565218e-05,
"loss": 0.5554,
"step": 259
},
{
"epoch": 2.261153427638738,
"grad_norm": 1.0078125,
"learning_rate": 3.8739130434782613e-05,
"loss": 0.4197,
"step": 260
},
{
"epoch": 2.2698585418933623,
"grad_norm": 1.140625,
"learning_rate": 3.869565217391305e-05,
"loss": 0.4527,
"step": 261
},
{
"epoch": 2.2785636561479867,
"grad_norm": 1.28125,
"learning_rate": 3.865217391304348e-05,
"loss": 0.6683,
"step": 262
},
{
"epoch": 2.2872687704026116,
"grad_norm": 1.203125,
"learning_rate": 3.8608695652173914e-05,
"loss": 0.4808,
"step": 263
},
{
"epoch": 2.295973884657236,
"grad_norm": 1.109375,
"learning_rate": 3.856521739130435e-05,
"loss": 0.4871,
"step": 264
},
{
"epoch": 2.304678998911861,
"grad_norm": 1.2421875,
"learning_rate": 3.852173913043479e-05,
"loss": 0.6857,
"step": 265
},
{
"epoch": 2.313384113166485,
"grad_norm": 1.1328125,
"learning_rate": 3.847826086956522e-05,
"loss": 0.6002,
"step": 266
},
{
"epoch": 2.32208922742111,
"grad_norm": 1.1796875,
"learning_rate": 3.8434782608695654e-05,
"loss": 0.5997,
"step": 267
},
{
"epoch": 2.3307943416757344,
"grad_norm": 1.1640625,
"learning_rate": 3.839130434782609e-05,
"loss": 0.4839,
"step": 268
},
{
"epoch": 2.3394994559303592,
"grad_norm": 1.1640625,
"learning_rate": 3.834782608695652e-05,
"loss": 0.5754,
"step": 269
},
{
"epoch": 2.3482045701849836,
"grad_norm": 1.21875,
"learning_rate": 3.8304347826086955e-05,
"loss": 0.5072,
"step": 270
},
{
"epoch": 2.356909684439608,
"grad_norm": 1.453125,
"learning_rate": 3.8260869565217395e-05,
"loss": 0.6542,
"step": 271
},
{
"epoch": 2.365614798694233,
"grad_norm": 1.2265625,
"learning_rate": 3.821739130434783e-05,
"loss": 0.6175,
"step": 272
},
{
"epoch": 2.3743199129488577,
"grad_norm": 1.0859375,
"learning_rate": 3.817391304347827e-05,
"loss": 0.5063,
"step": 273
},
{
"epoch": 2.383025027203482,
"grad_norm": 1.1484375,
"learning_rate": 3.8130434782608695e-05,
"loss": 0.5095,
"step": 274
},
{
"epoch": 2.3917301414581065,
"grad_norm": 1.5078125,
"learning_rate": 3.808695652173913e-05,
"loss": 0.5236,
"step": 275
},
{
"epoch": 2.4004352557127313,
"grad_norm": 1.25,
"learning_rate": 3.804347826086957e-05,
"loss": 0.6417,
"step": 276
},
{
"epoch": 2.4091403699673557,
"grad_norm": 1.2421875,
"learning_rate": 3.8e-05,
"loss": 0.5618,
"step": 277
},
{
"epoch": 2.4178454842219805,
"grad_norm": 1.1796875,
"learning_rate": 3.7956521739130436e-05,
"loss": 0.409,
"step": 278
},
{
"epoch": 2.426550598476605,
"grad_norm": 1.2421875,
"learning_rate": 3.7913043478260876e-05,
"loss": 0.4815,
"step": 279
},
{
"epoch": 2.4352557127312298,
"grad_norm": 1.1796875,
"learning_rate": 3.786956521739131e-05,
"loss": 0.5533,
"step": 280
},
{
"epoch": 2.443960826985854,
"grad_norm": 1.1875,
"learning_rate": 3.7826086956521736e-05,
"loss": 0.4977,
"step": 281
},
{
"epoch": 2.452665941240479,
"grad_norm": 1.265625,
"learning_rate": 3.7782608695652176e-05,
"loss": 0.5402,
"step": 282
},
{
"epoch": 2.4613710554951034,
"grad_norm": 1.234375,
"learning_rate": 3.773913043478261e-05,
"loss": 0.4572,
"step": 283
},
{
"epoch": 2.4700761697497278,
"grad_norm": 1.1953125,
"learning_rate": 3.769565217391304e-05,
"loss": 0.5009,
"step": 284
},
{
"epoch": 2.4787812840043526,
"grad_norm": 1.140625,
"learning_rate": 3.765217391304348e-05,
"loss": 0.508,
"step": 285
},
{
"epoch": 2.487486398258977,
"grad_norm": 1.3359375,
"learning_rate": 3.7608695652173917e-05,
"loss": 0.5922,
"step": 286
},
{
"epoch": 2.496191512513602,
"grad_norm": 1.15625,
"learning_rate": 3.756521739130435e-05,
"loss": 0.5631,
"step": 287
},
{
"epoch": 2.504896626768226,
"grad_norm": 1.1953125,
"learning_rate": 3.752173913043478e-05,
"loss": 0.5335,
"step": 288
},
{
"epoch": 2.513601741022851,
"grad_norm": 1.28125,
"learning_rate": 3.747826086956522e-05,
"loss": 0.6231,
"step": 289
},
{
"epoch": 2.5223068552774754,
"grad_norm": 1.4765625,
"learning_rate": 3.743478260869566e-05,
"loss": 0.5049,
"step": 290
},
{
"epoch": 2.5310119695321003,
"grad_norm": 1.25,
"learning_rate": 3.739130434782609e-05,
"loss": 0.6165,
"step": 291
},
{
"epoch": 2.5397170837867247,
"grad_norm": 1.2265625,
"learning_rate": 3.7347826086956524e-05,
"loss": 0.5344,
"step": 292
},
{
"epoch": 2.548422198041349,
"grad_norm": 1.078125,
"learning_rate": 3.730434782608696e-05,
"loss": 0.5464,
"step": 293
},
{
"epoch": 2.557127312295974,
"grad_norm": 1.1640625,
"learning_rate": 3.726086956521739e-05,
"loss": 0.571,
"step": 294
},
{
"epoch": 2.5658324265505987,
"grad_norm": 1.1328125,
"learning_rate": 3.7217391304347824e-05,
"loss": 0.4843,
"step": 295
},
{
"epoch": 2.574537540805223,
"grad_norm": 1.4453125,
"learning_rate": 3.7173913043478264e-05,
"loss": 0.4947,
"step": 296
},
{
"epoch": 2.5832426550598475,
"grad_norm": 1.171875,
"learning_rate": 3.71304347826087e-05,
"loss": 0.5202,
"step": 297
},
{
"epoch": 2.5919477693144723,
"grad_norm": 1.1796875,
"learning_rate": 3.708695652173913e-05,
"loss": 0.4633,
"step": 298
},
{
"epoch": 2.6006528835690967,
"grad_norm": 1.3125,
"learning_rate": 3.704347826086957e-05,
"loss": 0.4723,
"step": 299
},
{
"epoch": 2.6093579978237216,
"grad_norm": 1.265625,
"learning_rate": 3.7e-05,
"loss": 0.4811,
"step": 300
},
{
"epoch": 2.618063112078346,
"grad_norm": 1.34375,
"learning_rate": 3.695652173913043e-05,
"loss": 0.7662,
"step": 301
},
{
"epoch": 2.6267682263329704,
"grad_norm": 1.3125,
"learning_rate": 3.691304347826087e-05,
"loss": 0.67,
"step": 302
},
{
"epoch": 2.635473340587595,
"grad_norm": 1.3671875,
"learning_rate": 3.6869565217391305e-05,
"loss": 0.6694,
"step": 303
},
{
"epoch": 2.64417845484222,
"grad_norm": 0.9921875,
"learning_rate": 3.682608695652174e-05,
"loss": 0.5212,
"step": 304
},
{
"epoch": 2.6528835690968444,
"grad_norm": 1.09375,
"learning_rate": 3.678260869565218e-05,
"loss": 0.4885,
"step": 305
},
{
"epoch": 2.661588683351469,
"grad_norm": 1.15625,
"learning_rate": 3.673913043478261e-05,
"loss": 0.6483,
"step": 306
},
{
"epoch": 2.6702937976060936,
"grad_norm": 1.25,
"learning_rate": 3.6695652173913046e-05,
"loss": 0.557,
"step": 307
},
{
"epoch": 2.678998911860718,
"grad_norm": 1.109375,
"learning_rate": 3.665217391304348e-05,
"loss": 0.5384,
"step": 308
},
{
"epoch": 2.687704026115343,
"grad_norm": 1.1484375,
"learning_rate": 3.660869565217391e-05,
"loss": 0.3727,
"step": 309
},
{
"epoch": 2.6964091403699673,
"grad_norm": 1.3515625,
"learning_rate": 3.656521739130435e-05,
"loss": 0.5383,
"step": 310
},
{
"epoch": 2.705114254624592,
"grad_norm": 1.328125,
"learning_rate": 3.6521739130434786e-05,
"loss": 0.6103,
"step": 311
},
{
"epoch": 2.7138193688792165,
"grad_norm": 1.2421875,
"learning_rate": 3.647826086956522e-05,
"loss": 0.5469,
"step": 312
},
{
"epoch": 2.7225244831338413,
"grad_norm": 1.1953125,
"learning_rate": 3.643478260869565e-05,
"loss": 0.5393,
"step": 313
},
{
"epoch": 2.7312295973884657,
"grad_norm": 1.1953125,
"learning_rate": 3.6391304347826086e-05,
"loss": 0.6248,
"step": 314
},
{
"epoch": 2.73993471164309,
"grad_norm": 1.2421875,
"learning_rate": 3.634782608695652e-05,
"loss": 0.5939,
"step": 315
},
{
"epoch": 2.748639825897715,
"grad_norm": 1.1796875,
"learning_rate": 3.630434782608696e-05,
"loss": 0.5281,
"step": 316
},
{
"epoch": 2.7573449401523398,
"grad_norm": 1.09375,
"learning_rate": 3.6260869565217394e-05,
"loss": 0.6094,
"step": 317
},
{
"epoch": 2.766050054406964,
"grad_norm": 1.625,
"learning_rate": 3.621739130434783e-05,
"loss": 0.5353,
"step": 318
},
{
"epoch": 2.7747551686615886,
"grad_norm": 1.296875,
"learning_rate": 3.617391304347826e-05,
"loss": 0.8688,
"step": 319
},
{
"epoch": 2.7834602829162134,
"grad_norm": 1.1796875,
"learning_rate": 3.6130434782608694e-05,
"loss": 0.4359,
"step": 320
},
{
"epoch": 2.792165397170838,
"grad_norm": 1.34375,
"learning_rate": 3.6086956521739134e-05,
"loss": 0.5274,
"step": 321
},
{
"epoch": 2.8008705114254626,
"grad_norm": 1.0703125,
"learning_rate": 3.604347826086957e-05,
"loss": 0.5489,
"step": 322
},
{
"epoch": 2.809575625680087,
"grad_norm": 1.4375,
"learning_rate": 3.6e-05,
"loss": 0.4897,
"step": 323
},
{
"epoch": 2.8182807399347114,
"grad_norm": 1.2109375,
"learning_rate": 3.595652173913044e-05,
"loss": 0.4663,
"step": 324
},
{
"epoch": 2.8269858541893362,
"grad_norm": 1.125,
"learning_rate": 3.5913043478260875e-05,
"loss": 0.6313,
"step": 325
},
{
"epoch": 2.835690968443961,
"grad_norm": 1.1875,
"learning_rate": 3.58695652173913e-05,
"loss": 0.8168,
"step": 326
},
{
"epoch": 2.8443960826985855,
"grad_norm": 1.203125,
"learning_rate": 3.582608695652174e-05,
"loss": 0.8116,
"step": 327
},
{
"epoch": 2.85310119695321,
"grad_norm": 1.1015625,
"learning_rate": 3.5782608695652175e-05,
"loss": 0.6349,
"step": 328
},
{
"epoch": 2.8618063112078347,
"grad_norm": 1.0859375,
"learning_rate": 3.573913043478261e-05,
"loss": 0.5963,
"step": 329
},
{
"epoch": 2.870511425462459,
"grad_norm": 1.09375,
"learning_rate": 3.569565217391305e-05,
"loss": 0.6366,
"step": 330
},
{
"epoch": 2.879216539717084,
"grad_norm": 1.09375,
"learning_rate": 3.565217391304348e-05,
"loss": 0.5822,
"step": 331
},
{
"epoch": 2.8879216539717083,
"grad_norm": 1.1796875,
"learning_rate": 3.5608695652173915e-05,
"loss": 0.4839,
"step": 332
},
{
"epoch": 2.8966267682263327,
"grad_norm": 1.0859375,
"learning_rate": 3.556521739130435e-05,
"loss": 0.6158,
"step": 333
},
{
"epoch": 2.9053318824809575,
"grad_norm": 1.2109375,
"learning_rate": 3.552173913043478e-05,
"loss": 0.4699,
"step": 334
},
{
"epoch": 2.9140369967355824,
"grad_norm": 1.4765625,
"learning_rate": 3.5478260869565216e-05,
"loss": 0.6381,
"step": 335
},
{
"epoch": 2.9227421109902068,
"grad_norm": 1.2421875,
"learning_rate": 3.5434782608695656e-05,
"loss": 0.5853,
"step": 336
},
{
"epoch": 2.931447225244831,
"grad_norm": 1.234375,
"learning_rate": 3.539130434782609e-05,
"loss": 0.6238,
"step": 337
},
{
"epoch": 2.940152339499456,
"grad_norm": 1.3046875,
"learning_rate": 3.534782608695652e-05,
"loss": 0.6795,
"step": 338
},
{
"epoch": 2.9488574537540804,
"grad_norm": 1.0859375,
"learning_rate": 3.5304347826086956e-05,
"loss": 0.546,
"step": 339
},
{
"epoch": 2.957562568008705,
"grad_norm": 1.1171875,
"learning_rate": 3.526086956521739e-05,
"loss": 0.4245,
"step": 340
},
{
"epoch": 2.9662676822633296,
"grad_norm": 1.265625,
"learning_rate": 3.521739130434783e-05,
"loss": 0.5433,
"step": 341
},
{
"epoch": 2.9749727965179544,
"grad_norm": 1.2578125,
"learning_rate": 3.517391304347826e-05,
"loss": 0.4675,
"step": 342
},
{
"epoch": 2.983677910772579,
"grad_norm": 1.28125,
"learning_rate": 3.51304347826087e-05,
"loss": 0.6289,
"step": 343
},
{
"epoch": 2.9923830250272037,
"grad_norm": 1.21875,
"learning_rate": 3.508695652173914e-05,
"loss": 0.6289,
"step": 344
},
{
"epoch": 3.0,
"grad_norm": 1.28125,
"learning_rate": 3.5043478260869564e-05,
"loss": 0.4109,
"step": 345
},
{
"epoch": 3.0087051142546244,
"grad_norm": 1.421875,
"learning_rate": 3.5e-05,
"loss": 0.4183,
"step": 346
},
{
"epoch": 3.0174102285092492,
"grad_norm": 1.1953125,
"learning_rate": 3.495652173913044e-05,
"loss": 0.3592,
"step": 347
},
{
"epoch": 3.0261153427638736,
"grad_norm": 1.5234375,
"learning_rate": 3.491304347826087e-05,
"loss": 0.5384,
"step": 348
},
{
"epoch": 3.0348204570184985,
"grad_norm": 1.125,
"learning_rate": 3.4869565217391304e-05,
"loss": 0.5139,
"step": 349
},
{
"epoch": 3.043525571273123,
"grad_norm": 1.7421875,
"learning_rate": 3.4826086956521744e-05,
"loss": 0.9164,
"step": 350
},
{
"epoch": 3.0522306855277477,
"grad_norm": 1.8125,
"learning_rate": 3.478260869565218e-05,
"loss": 0.3615,
"step": 351
},
{
"epoch": 3.060935799782372,
"grad_norm": 2.046875,
"learning_rate": 3.473913043478261e-05,
"loss": 0.3857,
"step": 352
},
{
"epoch": 3.069640914036997,
"grad_norm": 1.84375,
"learning_rate": 3.4695652173913045e-05,
"loss": 0.4078,
"step": 353
},
{
"epoch": 3.0783460282916213,
"grad_norm": 1.796875,
"learning_rate": 3.465217391304348e-05,
"loss": 0.4181,
"step": 354
},
{
"epoch": 3.087051142546246,
"grad_norm": 1.6015625,
"learning_rate": 3.460869565217392e-05,
"loss": 0.3059,
"step": 355
},
{
"epoch": 3.0957562568008705,
"grad_norm": 1.390625,
"learning_rate": 3.456521739130435e-05,
"loss": 0.4606,
"step": 356
},
{
"epoch": 3.104461371055495,
"grad_norm": 1.28125,
"learning_rate": 3.4521739130434785e-05,
"loss": 0.4711,
"step": 357
},
{
"epoch": 3.1131664853101197,
"grad_norm": 1.28125,
"learning_rate": 3.447826086956522e-05,
"loss": 0.4096,
"step": 358
},
{
"epoch": 3.121871599564744,
"grad_norm": 1.25,
"learning_rate": 3.443478260869565e-05,
"loss": 0.6088,
"step": 359
},
{
"epoch": 3.130576713819369,
"grad_norm": 1.171875,
"learning_rate": 3.4391304347826085e-05,
"loss": 0.3165,
"step": 360
},
{
"epoch": 3.1392818280739934,
"grad_norm": 1.2421875,
"learning_rate": 3.4347826086956526e-05,
"loss": 0.306,
"step": 361
},
{
"epoch": 3.147986942328618,
"grad_norm": 1.2890625,
"learning_rate": 3.430434782608696e-05,
"loss": 0.3672,
"step": 362
},
{
"epoch": 3.1566920565832426,
"grad_norm": 1.1875,
"learning_rate": 3.426086956521739e-05,
"loss": 0.4125,
"step": 363
},
{
"epoch": 3.1653971708378674,
"grad_norm": 1.5,
"learning_rate": 3.4217391304347826e-05,
"loss": 0.4383,
"step": 364
},
{
"epoch": 3.174102285092492,
"grad_norm": 1.40625,
"learning_rate": 3.417391304347826e-05,
"loss": 0.3679,
"step": 365
},
{
"epoch": 3.1828073993471167,
"grad_norm": 1.453125,
"learning_rate": 3.413043478260869e-05,
"loss": 0.4468,
"step": 366
},
{
"epoch": 3.191512513601741,
"grad_norm": 1.34375,
"learning_rate": 3.408695652173913e-05,
"loss": 0.3335,
"step": 367
},
{
"epoch": 3.2002176278563654,
"grad_norm": 1.359375,
"learning_rate": 3.4043478260869566e-05,
"loss": 0.3439,
"step": 368
},
{
"epoch": 3.2089227421109903,
"grad_norm": 1.4765625,
"learning_rate": 3.4000000000000007e-05,
"loss": 0.4858,
"step": 369
},
{
"epoch": 3.2176278563656147,
"grad_norm": 1.453125,
"learning_rate": 3.395652173913044e-05,
"loss": 0.3231,
"step": 370
},
{
"epoch": 3.2263329706202395,
"grad_norm": 1.6171875,
"learning_rate": 3.3913043478260867e-05,
"loss": 0.5224,
"step": 371
},
{
"epoch": 3.235038084874864,
"grad_norm": 1.328125,
"learning_rate": 3.386956521739131e-05,
"loss": 0.3573,
"step": 372
},
{
"epoch": 3.2437431991294887,
"grad_norm": 1.359375,
"learning_rate": 3.382608695652174e-05,
"loss": 0.3516,
"step": 373
},
{
"epoch": 3.252448313384113,
"grad_norm": 1.3515625,
"learning_rate": 3.3782608695652174e-05,
"loss": 0.3656,
"step": 374
},
{
"epoch": 3.261153427638738,
"grad_norm": 1.4453125,
"learning_rate": 3.3739130434782614e-05,
"loss": 0.5266,
"step": 375
},
{
"epoch": 3.2698585418933623,
"grad_norm": 1.3515625,
"learning_rate": 3.369565217391305e-05,
"loss": 0.3544,
"step": 376
},
{
"epoch": 3.2785636561479867,
"grad_norm": 1.2890625,
"learning_rate": 3.365217391304348e-05,
"loss": 0.3623,
"step": 377
},
{
"epoch": 3.2872687704026116,
"grad_norm": 1.3203125,
"learning_rate": 3.3608695652173914e-05,
"loss": 0.3814,
"step": 378
},
{
"epoch": 3.295973884657236,
"grad_norm": 1.390625,
"learning_rate": 3.356521739130435e-05,
"loss": 0.42,
"step": 379
},
{
"epoch": 3.304678998911861,
"grad_norm": 1.2890625,
"learning_rate": 3.352173913043478e-05,
"loss": 0.4972,
"step": 380
},
{
"epoch": 3.313384113166485,
"grad_norm": 1.6015625,
"learning_rate": 3.347826086956522e-05,
"loss": 0.5218,
"step": 381
},
{
"epoch": 3.32208922742111,
"grad_norm": 1.1953125,
"learning_rate": 3.3434782608695655e-05,
"loss": 0.3364,
"step": 382
},
{
"epoch": 3.3307943416757344,
"grad_norm": 1.2890625,
"learning_rate": 3.339130434782609e-05,
"loss": 0.363,
"step": 383
},
{
"epoch": 3.3394994559303592,
"grad_norm": 1.40625,
"learning_rate": 3.334782608695652e-05,
"loss": 0.3453,
"step": 384
},
{
"epoch": 3.3482045701849836,
"grad_norm": 1.4765625,
"learning_rate": 3.3304347826086955e-05,
"loss": 0.3866,
"step": 385
},
{
"epoch": 3.356909684439608,
"grad_norm": 1.34375,
"learning_rate": 3.3260869565217395e-05,
"loss": 0.3148,
"step": 386
},
{
"epoch": 3.365614798694233,
"grad_norm": 1.2578125,
"learning_rate": 3.321739130434783e-05,
"loss": 0.6014,
"step": 387
},
{
"epoch": 3.3743199129488577,
"grad_norm": 1.4609375,
"learning_rate": 3.317391304347826e-05,
"loss": 0.3306,
"step": 388
},
{
"epoch": 3.383025027203482,
"grad_norm": 1.3125,
"learning_rate": 3.31304347826087e-05,
"loss": 0.4507,
"step": 389
},
{
"epoch": 3.3917301414581065,
"grad_norm": 1.3984375,
"learning_rate": 3.308695652173913e-05,
"loss": 0.4535,
"step": 390
},
{
"epoch": 3.4004352557127313,
"grad_norm": 1.296875,
"learning_rate": 3.304347826086956e-05,
"loss": 0.4736,
"step": 391
},
{
"epoch": 3.4091403699673557,
"grad_norm": 1.2890625,
"learning_rate": 3.3e-05,
"loss": 0.3137,
"step": 392
},
{
"epoch": 3.4178454842219805,
"grad_norm": 1.2109375,
"learning_rate": 3.2956521739130436e-05,
"loss": 0.335,
"step": 393
},
{
"epoch": 3.426550598476605,
"grad_norm": 1.328125,
"learning_rate": 3.291304347826087e-05,
"loss": 0.3644,
"step": 394
},
{
"epoch": 3.4352557127312298,
"grad_norm": 1.40625,
"learning_rate": 3.286956521739131e-05,
"loss": 0.3236,
"step": 395
},
{
"epoch": 3.443960826985854,
"grad_norm": 1.40625,
"learning_rate": 3.282608695652174e-05,
"loss": 0.6159,
"step": 396
},
{
"epoch": 3.452665941240479,
"grad_norm": 1.4140625,
"learning_rate": 3.278260869565217e-05,
"loss": 0.4811,
"step": 397
},
{
"epoch": 3.4613710554951034,
"grad_norm": 1.1640625,
"learning_rate": 3.273913043478261e-05,
"loss": 0.3592,
"step": 398
},
{
"epoch": 3.4700761697497278,
"grad_norm": 1.203125,
"learning_rate": 3.269565217391304e-05,
"loss": 0.31,
"step": 399
},
{
"epoch": 3.4787812840043526,
"grad_norm": 1.46875,
"learning_rate": 3.2652173913043484e-05,
"loss": 0.3305,
"step": 400
},
{
"epoch": 3.487486398258977,
"grad_norm": 1.4609375,
"learning_rate": 3.260869565217392e-05,
"loss": 0.4076,
"step": 401
},
{
"epoch": 3.496191512513602,
"grad_norm": 1.46875,
"learning_rate": 3.256521739130435e-05,
"loss": 0.4706,
"step": 402
},
{
"epoch": 3.504896626768226,
"grad_norm": 1.2578125,
"learning_rate": 3.2521739130434784e-05,
"loss": 0.2767,
"step": 403
},
{
"epoch": 3.513601741022851,
"grad_norm": 1.28125,
"learning_rate": 3.247826086956522e-05,
"loss": 0.3916,
"step": 404
},
{
"epoch": 3.5223068552774754,
"grad_norm": 1.234375,
"learning_rate": 3.243478260869565e-05,
"loss": 0.3626,
"step": 405
},
{
"epoch": 3.5310119695321003,
"grad_norm": 1.265625,
"learning_rate": 3.239130434782609e-05,
"loss": 0.3815,
"step": 406
},
{
"epoch": 3.5397170837867247,
"grad_norm": 1.1484375,
"learning_rate": 3.2347826086956524e-05,
"loss": 0.3034,
"step": 407
},
{
"epoch": 3.548422198041349,
"grad_norm": 1.3046875,
"learning_rate": 3.230434782608696e-05,
"loss": 0.374,
"step": 408
},
{
"epoch": 3.557127312295974,
"grad_norm": 1.3828125,
"learning_rate": 3.226086956521739e-05,
"loss": 0.3537,
"step": 409
},
{
"epoch": 3.5658324265505987,
"grad_norm": 1.4609375,
"learning_rate": 3.2217391304347825e-05,
"loss": 0.5427,
"step": 410
},
{
"epoch": 3.574537540805223,
"grad_norm": 1.3359375,
"learning_rate": 3.217391304347826e-05,
"loss": 0.3466,
"step": 411
},
{
"epoch": 3.5832426550598475,
"grad_norm": 1.4453125,
"learning_rate": 3.21304347826087e-05,
"loss": 0.5072,
"step": 412
},
{
"epoch": 3.5919477693144723,
"grad_norm": 1.3984375,
"learning_rate": 3.208695652173913e-05,
"loss": 0.3162,
"step": 413
},
{
"epoch": 3.6006528835690967,
"grad_norm": 1.1953125,
"learning_rate": 3.204347826086957e-05,
"loss": 0.51,
"step": 414
},
{
"epoch": 3.6093579978237216,
"grad_norm": 1.25,
"learning_rate": 3.2000000000000005e-05,
"loss": 0.4698,
"step": 415
},
{
"epoch": 3.618063112078346,
"grad_norm": 1.296875,
"learning_rate": 3.195652173913043e-05,
"loss": 0.3544,
"step": 416
},
{
"epoch": 3.6267682263329704,
"grad_norm": 1.453125,
"learning_rate": 3.191304347826087e-05,
"loss": 0.3747,
"step": 417
},
{
"epoch": 3.635473340587595,
"grad_norm": 1.328125,
"learning_rate": 3.1869565217391306e-05,
"loss": 0.4795,
"step": 418
},
{
"epoch": 3.64417845484222,
"grad_norm": 1.2734375,
"learning_rate": 3.182608695652174e-05,
"loss": 0.5804,
"step": 419
},
{
"epoch": 3.6528835690968444,
"grad_norm": 1.3125,
"learning_rate": 3.178260869565218e-05,
"loss": 0.4708,
"step": 420
},
{
"epoch": 3.661588683351469,
"grad_norm": 1.1796875,
"learning_rate": 3.173913043478261e-05,
"loss": 0.4494,
"step": 421
},
{
"epoch": 3.6702937976060936,
"grad_norm": 1.34375,
"learning_rate": 3.1695652173913046e-05,
"loss": 0.4712,
"step": 422
},
{
"epoch": 3.678998911860718,
"grad_norm": 1.6953125,
"learning_rate": 3.165217391304348e-05,
"loss": 0.5315,
"step": 423
},
{
"epoch": 3.687704026115343,
"grad_norm": 1.2578125,
"learning_rate": 3.160869565217391e-05,
"loss": 0.3474,
"step": 424
},
{
"epoch": 3.6964091403699673,
"grad_norm": 1.328125,
"learning_rate": 3.1565217391304346e-05,
"loss": 0.408,
"step": 425
},
{
"epoch": 3.705114254624592,
"grad_norm": 1.3828125,
"learning_rate": 3.152173913043479e-05,
"loss": 0.3594,
"step": 426
},
{
"epoch": 3.7138193688792165,
"grad_norm": 1.296875,
"learning_rate": 3.147826086956522e-05,
"loss": 0.3591,
"step": 427
},
{
"epoch": 3.7225244831338413,
"grad_norm": 1.359375,
"learning_rate": 3.1434782608695653e-05,
"loss": 0.4131,
"step": 428
},
{
"epoch": 3.7312295973884657,
"grad_norm": 1.2265625,
"learning_rate": 3.139130434782609e-05,
"loss": 0.59,
"step": 429
},
{
"epoch": 3.73993471164309,
"grad_norm": 1.515625,
"learning_rate": 3.134782608695652e-05,
"loss": 0.5576,
"step": 430
},
{
"epoch": 3.748639825897715,
"grad_norm": 1.3828125,
"learning_rate": 3.130434782608696e-05,
"loss": 0.333,
"step": 431
},
{
"epoch": 3.7573449401523398,
"grad_norm": 1.25,
"learning_rate": 3.1260869565217394e-05,
"loss": 0.3218,
"step": 432
},
{
"epoch": 3.766050054406964,
"grad_norm": 1.328125,
"learning_rate": 3.121739130434783e-05,
"loss": 0.4542,
"step": 433
},
{
"epoch": 3.7747551686615886,
"grad_norm": 1.5234375,
"learning_rate": 3.117391304347827e-05,
"loss": 0.497,
"step": 434
},
{
"epoch": 3.7834602829162134,
"grad_norm": 1.40625,
"learning_rate": 3.1130434782608694e-05,
"loss": 0.3721,
"step": 435
},
{
"epoch": 3.792165397170838,
"grad_norm": 1.34375,
"learning_rate": 3.108695652173913e-05,
"loss": 0.4833,
"step": 436
},
{
"epoch": 3.8008705114254626,
"grad_norm": 1.484375,
"learning_rate": 3.104347826086957e-05,
"loss": 0.3413,
"step": 437
},
{
"epoch": 3.809575625680087,
"grad_norm": 1.1640625,
"learning_rate": 3.1e-05,
"loss": 0.3731,
"step": 438
},
{
"epoch": 3.8182807399347114,
"grad_norm": 1.2734375,
"learning_rate": 3.0956521739130435e-05,
"loss": 0.3438,
"step": 439
},
{
"epoch": 3.8269858541893362,
"grad_norm": 1.359375,
"learning_rate": 3.0913043478260875e-05,
"loss": 0.4249,
"step": 440
},
{
"epoch": 3.835690968443961,
"grad_norm": 1.25,
"learning_rate": 3.086956521739131e-05,
"loss": 0.4995,
"step": 441
},
{
"epoch": 3.8443960826985855,
"grad_norm": 1.484375,
"learning_rate": 3.0826086956521735e-05,
"loss": 0.4477,
"step": 442
},
{
"epoch": 3.85310119695321,
"grad_norm": 1.25,
"learning_rate": 3.0782608695652175e-05,
"loss": 0.4823,
"step": 443
},
{
"epoch": 3.8618063112078347,
"grad_norm": 1.3828125,
"learning_rate": 3.073913043478261e-05,
"loss": 0.5226,
"step": 444
},
{
"epoch": 3.870511425462459,
"grad_norm": 1.359375,
"learning_rate": 3.069565217391305e-05,
"loss": 0.4088,
"step": 445
},
{
"epoch": 3.879216539717084,
"grad_norm": 1.5625,
"learning_rate": 3.065217391304348e-05,
"loss": 0.4232,
"step": 446
},
{
"epoch": 3.8879216539717083,
"grad_norm": 1.4375,
"learning_rate": 3.0608695652173916e-05,
"loss": 0.3985,
"step": 447
},
{
"epoch": 3.8966267682263327,
"grad_norm": 1.421875,
"learning_rate": 3.056521739130435e-05,
"loss": 0.4992,
"step": 448
},
{
"epoch": 3.9053318824809575,
"grad_norm": 1.4296875,
"learning_rate": 3.052173913043478e-05,
"loss": 0.4681,
"step": 449
},
{
"epoch": 3.9140369967355824,
"grad_norm": 1.34375,
"learning_rate": 3.0478260869565216e-05,
"loss": 0.3519,
"step": 450
},
{
"epoch": 3.9227421109902068,
"grad_norm": 1.15625,
"learning_rate": 3.0434782608695656e-05,
"loss": 0.38,
"step": 451
},
{
"epoch": 3.931447225244831,
"grad_norm": 1.421875,
"learning_rate": 3.039130434782609e-05,
"loss": 0.5022,
"step": 452
},
{
"epoch": 3.940152339499456,
"grad_norm": 1.2890625,
"learning_rate": 3.034782608695652e-05,
"loss": 0.4629,
"step": 453
},
{
"epoch": 3.9488574537540804,
"grad_norm": 1.328125,
"learning_rate": 3.030434782608696e-05,
"loss": 0.7028,
"step": 454
},
{
"epoch": 3.957562568008705,
"grad_norm": 1.15625,
"learning_rate": 3.0260869565217393e-05,
"loss": 0.3148,
"step": 455
},
{
"epoch": 3.9662676822633296,
"grad_norm": 1.3046875,
"learning_rate": 3.0217391304347827e-05,
"loss": 0.5369,
"step": 456
},
{
"epoch": 3.9749727965179544,
"grad_norm": 1.125,
"learning_rate": 3.0173913043478264e-05,
"loss": 0.3866,
"step": 457
},
{
"epoch": 3.983677910772579,
"grad_norm": 1.3203125,
"learning_rate": 3.0130434782608697e-05,
"loss": 0.5832,
"step": 458
},
{
"epoch": 3.9923830250272037,
"grad_norm": 1.359375,
"learning_rate": 3.008695652173913e-05,
"loss": 0.4359,
"step": 459
},
{
"epoch": 4.0,
"grad_norm": 1.7265625,
"learning_rate": 3.0043478260869567e-05,
"loss": 0.3593,
"step": 460
},
{
"epoch": 4.008705114254624,
"grad_norm": 1.4140625,
"learning_rate": 3e-05,
"loss": 0.3197,
"step": 461
},
{
"epoch": 4.017410228509249,
"grad_norm": 1.15625,
"learning_rate": 2.9956521739130438e-05,
"loss": 0.3831,
"step": 462
},
{
"epoch": 4.026115342763874,
"grad_norm": 1.3828125,
"learning_rate": 2.991304347826087e-05,
"loss": 0.2362,
"step": 463
},
{
"epoch": 4.0348204570184985,
"grad_norm": 1.328125,
"learning_rate": 2.9869565217391304e-05,
"loss": 0.2624,
"step": 464
},
{
"epoch": 4.043525571273123,
"grad_norm": 1.234375,
"learning_rate": 2.982608695652174e-05,
"loss": 0.2093,
"step": 465
},
{
"epoch": 4.052230685527747,
"grad_norm": 1.9296875,
"learning_rate": 2.9782608695652175e-05,
"loss": 0.4162,
"step": 466
},
{
"epoch": 4.0609357997823725,
"grad_norm": 1.9921875,
"learning_rate": 2.9739130434782608e-05,
"loss": 0.286,
"step": 467
},
{
"epoch": 4.069640914036997,
"grad_norm": 2.28125,
"learning_rate": 2.969565217391305e-05,
"loss": 0.3429,
"step": 468
},
{
"epoch": 4.078346028291621,
"grad_norm": 1.7421875,
"learning_rate": 2.965217391304348e-05,
"loss": 0.2248,
"step": 469
},
{
"epoch": 4.087051142546246,
"grad_norm": 1.6484375,
"learning_rate": 2.9608695652173912e-05,
"loss": 0.3577,
"step": 470
},
{
"epoch": 4.09575625680087,
"grad_norm": 1.453125,
"learning_rate": 2.9565217391304352e-05,
"loss": 0.2521,
"step": 471
},
{
"epoch": 4.104461371055495,
"grad_norm": 1.4921875,
"learning_rate": 2.9521739130434782e-05,
"loss": 0.2246,
"step": 472
},
{
"epoch": 4.11316648531012,
"grad_norm": 1.7578125,
"learning_rate": 2.9478260869565215e-05,
"loss": 0.4472,
"step": 473
},
{
"epoch": 4.121871599564744,
"grad_norm": 1.515625,
"learning_rate": 2.9434782608695656e-05,
"loss": 0.2405,
"step": 474
},
{
"epoch": 4.1305767138193685,
"grad_norm": 1.171875,
"learning_rate": 2.939130434782609e-05,
"loss": 0.2111,
"step": 475
},
{
"epoch": 4.139281828073994,
"grad_norm": 1.171875,
"learning_rate": 2.9347826086956526e-05,
"loss": 0.3421,
"step": 476
},
{
"epoch": 4.147986942328618,
"grad_norm": 1.359375,
"learning_rate": 2.930434782608696e-05,
"loss": 0.343,
"step": 477
},
{
"epoch": 4.156692056583243,
"grad_norm": 1.390625,
"learning_rate": 2.9260869565217393e-05,
"loss": 0.1912,
"step": 478
},
{
"epoch": 4.165397170837867,
"grad_norm": 1.8203125,
"learning_rate": 2.921739130434783e-05,
"loss": 0.3702,
"step": 479
},
{
"epoch": 4.174102285092492,
"grad_norm": 1.828125,
"learning_rate": 2.9173913043478263e-05,
"loss": 0.2608,
"step": 480
},
{
"epoch": 4.182807399347117,
"grad_norm": 1.4375,
"learning_rate": 2.9130434782608696e-05,
"loss": 0.2425,
"step": 481
},
{
"epoch": 4.191512513601741,
"grad_norm": 1.5,
"learning_rate": 2.9086956521739133e-05,
"loss": 0.2814,
"step": 482
},
{
"epoch": 4.200217627856365,
"grad_norm": 1.65625,
"learning_rate": 2.9043478260869567e-05,
"loss": 0.2904,
"step": 483
},
{
"epoch": 4.20892274211099,
"grad_norm": 1.4765625,
"learning_rate": 2.9e-05,
"loss": 0.275,
"step": 484
},
{
"epoch": 4.217627856365615,
"grad_norm": 1.390625,
"learning_rate": 2.8956521739130437e-05,
"loss": 0.2462,
"step": 485
},
{
"epoch": 4.2263329706202395,
"grad_norm": 1.5234375,
"learning_rate": 2.891304347826087e-05,
"loss": 0.262,
"step": 486
},
{
"epoch": 4.235038084874864,
"grad_norm": 1.25,
"learning_rate": 2.8869565217391304e-05,
"loss": 0.2443,
"step": 487
},
{
"epoch": 4.243743199129488,
"grad_norm": 1.4140625,
"learning_rate": 2.882608695652174e-05,
"loss": 0.3418,
"step": 488
},
{
"epoch": 4.2524483133841136,
"grad_norm": 1.4140625,
"learning_rate": 2.8782608695652174e-05,
"loss": 0.2845,
"step": 489
},
{
"epoch": 4.261153427638738,
"grad_norm": 1.671875,
"learning_rate": 2.8739130434782608e-05,
"loss": 0.3489,
"step": 490
},
{
"epoch": 4.269858541893362,
"grad_norm": 1.421875,
"learning_rate": 2.8695652173913044e-05,
"loss": 0.2591,
"step": 491
},
{
"epoch": 4.278563656147987,
"grad_norm": 1.5390625,
"learning_rate": 2.8652173913043478e-05,
"loss": 0.5734,
"step": 492
},
{
"epoch": 4.287268770402612,
"grad_norm": 1.4296875,
"learning_rate": 2.8608695652173918e-05,
"loss": 0.2109,
"step": 493
},
{
"epoch": 4.295973884657236,
"grad_norm": 1.5,
"learning_rate": 2.8565217391304348e-05,
"loss": 0.2421,
"step": 494
},
{
"epoch": 4.304678998911861,
"grad_norm": 1.5390625,
"learning_rate": 2.852173913043478e-05,
"loss": 0.4073,
"step": 495
},
{
"epoch": 4.313384113166485,
"grad_norm": 1.5546875,
"learning_rate": 2.847826086956522e-05,
"loss": 0.2709,
"step": 496
},
{
"epoch": 4.32208922742111,
"grad_norm": 1.453125,
"learning_rate": 2.8434782608695655e-05,
"loss": 0.2862,
"step": 497
},
{
"epoch": 4.330794341675735,
"grad_norm": 1.390625,
"learning_rate": 2.8391304347826085e-05,
"loss": 0.5668,
"step": 498
},
{
"epoch": 4.339499455930359,
"grad_norm": 1.7421875,
"learning_rate": 2.8347826086956525e-05,
"loss": 0.53,
"step": 499
},
{
"epoch": 4.348204570184984,
"grad_norm": 1.40625,
"learning_rate": 2.830434782608696e-05,
"loss": 0.2408,
"step": 500
},
{
"epoch": 4.356909684439608,
"grad_norm": 1.265625,
"learning_rate": 2.826086956521739e-05,
"loss": 0.2201,
"step": 501
},
{
"epoch": 4.365614798694233,
"grad_norm": 1.2890625,
"learning_rate": 2.821739130434783e-05,
"loss": 0.2811,
"step": 502
},
{
"epoch": 4.374319912948858,
"grad_norm": 1.484375,
"learning_rate": 2.8173913043478262e-05,
"loss": 0.2138,
"step": 503
},
{
"epoch": 4.383025027203482,
"grad_norm": 1.3203125,
"learning_rate": 2.8130434782608696e-05,
"loss": 0.2542,
"step": 504
},
{
"epoch": 4.3917301414581065,
"grad_norm": 1.640625,
"learning_rate": 2.8086956521739133e-05,
"loss": 0.289,
"step": 505
},
{
"epoch": 4.400435255712731,
"grad_norm": 1.5078125,
"learning_rate": 2.8043478260869566e-05,
"loss": 0.2872,
"step": 506
},
{
"epoch": 4.409140369967356,
"grad_norm": 1.3515625,
"learning_rate": 2.8000000000000003e-05,
"loss": 0.3546,
"step": 507
},
{
"epoch": 4.4178454842219805,
"grad_norm": 1.546875,
"learning_rate": 2.7956521739130436e-05,
"loss": 0.2645,
"step": 508
},
{
"epoch": 4.426550598476605,
"grad_norm": 1.2890625,
"learning_rate": 2.791304347826087e-05,
"loss": 0.3288,
"step": 509
},
{
"epoch": 4.435255712731229,
"grad_norm": 1.6015625,
"learning_rate": 2.7869565217391307e-05,
"loss": 0.3309,
"step": 510
},
{
"epoch": 4.443960826985855,
"grad_norm": 1.390625,
"learning_rate": 2.782608695652174e-05,
"loss": 0.2221,
"step": 511
},
{
"epoch": 4.452665941240479,
"grad_norm": 1.1796875,
"learning_rate": 2.7782608695652174e-05,
"loss": 0.3022,
"step": 512
},
{
"epoch": 4.461371055495103,
"grad_norm": 1.484375,
"learning_rate": 2.773913043478261e-05,
"loss": 0.2131,
"step": 513
},
{
"epoch": 4.470076169749728,
"grad_norm": 1.3046875,
"learning_rate": 2.7695652173913044e-05,
"loss": 0.3289,
"step": 514
},
{
"epoch": 4.478781284004352,
"grad_norm": 1.2890625,
"learning_rate": 2.7652173913043477e-05,
"loss": 0.1957,
"step": 515
},
{
"epoch": 4.487486398258977,
"grad_norm": 1.578125,
"learning_rate": 2.7608695652173917e-05,
"loss": 0.3898,
"step": 516
},
{
"epoch": 4.496191512513602,
"grad_norm": 1.2890625,
"learning_rate": 2.7565217391304347e-05,
"loss": 0.28,
"step": 517
},
{
"epoch": 4.504896626768226,
"grad_norm": 1.328125,
"learning_rate": 2.752173913043478e-05,
"loss": 0.348,
"step": 518
},
{
"epoch": 4.513601741022851,
"grad_norm": 1.546875,
"learning_rate": 2.747826086956522e-05,
"loss": 0.4128,
"step": 519
},
{
"epoch": 4.522306855277476,
"grad_norm": 1.2421875,
"learning_rate": 2.743478260869565e-05,
"loss": 0.294,
"step": 520
},
{
"epoch": 4.5310119695321,
"grad_norm": 1.609375,
"learning_rate": 2.7391304347826085e-05,
"loss": 0.1959,
"step": 521
},
{
"epoch": 4.539717083786725,
"grad_norm": 1.4609375,
"learning_rate": 2.7347826086956525e-05,
"loss": 0.2295,
"step": 522
},
{
"epoch": 4.548422198041349,
"grad_norm": 1.6796875,
"learning_rate": 2.7304347826086958e-05,
"loss": 0.5485,
"step": 523
},
{
"epoch": 4.5571273122959735,
"grad_norm": 1.4765625,
"learning_rate": 2.7260869565217395e-05,
"loss": 0.2739,
"step": 524
},
{
"epoch": 4.565832426550599,
"grad_norm": 1.421875,
"learning_rate": 2.721739130434783e-05,
"loss": 0.3562,
"step": 525
},
{
"epoch": 4.574537540805223,
"grad_norm": 1.4921875,
"learning_rate": 2.7173913043478262e-05,
"loss": 0.4189,
"step": 526
},
{
"epoch": 4.5832426550598475,
"grad_norm": 1.453125,
"learning_rate": 2.71304347826087e-05,
"loss": 0.4434,
"step": 527
},
{
"epoch": 4.591947769314472,
"grad_norm": 1.453125,
"learning_rate": 2.7086956521739132e-05,
"loss": 0.3395,
"step": 528
},
{
"epoch": 4.600652883569097,
"grad_norm": 1.5078125,
"learning_rate": 2.7043478260869566e-05,
"loss": 0.3857,
"step": 529
},
{
"epoch": 4.609357997823722,
"grad_norm": 1.5703125,
"learning_rate": 2.7000000000000002e-05,
"loss": 0.2454,
"step": 530
},
{
"epoch": 4.618063112078346,
"grad_norm": 1.546875,
"learning_rate": 2.6956521739130436e-05,
"loss": 0.2801,
"step": 531
},
{
"epoch": 4.62676822633297,
"grad_norm": 1.796875,
"learning_rate": 2.691304347826087e-05,
"loss": 0.4674,
"step": 532
},
{
"epoch": 4.635473340587595,
"grad_norm": 1.4296875,
"learning_rate": 2.6869565217391306e-05,
"loss": 0.2464,
"step": 533
},
{
"epoch": 4.64417845484222,
"grad_norm": 1.3828125,
"learning_rate": 2.682608695652174e-05,
"loss": 0.2842,
"step": 534
},
{
"epoch": 4.652883569096844,
"grad_norm": 1.3515625,
"learning_rate": 2.6782608695652173e-05,
"loss": 0.3065,
"step": 535
},
{
"epoch": 4.661588683351469,
"grad_norm": 1.5078125,
"learning_rate": 2.673913043478261e-05,
"loss": 0.2565,
"step": 536
},
{
"epoch": 4.670293797606093,
"grad_norm": 1.46875,
"learning_rate": 2.6695652173913043e-05,
"loss": 0.2242,
"step": 537
},
{
"epoch": 4.6789989118607185,
"grad_norm": 1.3125,
"learning_rate": 2.6652173913043483e-05,
"loss": 0.3663,
"step": 538
},
{
"epoch": 4.687704026115343,
"grad_norm": 1.46875,
"learning_rate": 2.6608695652173913e-05,
"loss": 0.3065,
"step": 539
},
{
"epoch": 4.696409140369967,
"grad_norm": 1.5859375,
"learning_rate": 2.6565217391304347e-05,
"loss": 0.3606,
"step": 540
},
{
"epoch": 4.705114254624592,
"grad_norm": 1.65625,
"learning_rate": 2.6521739130434787e-05,
"loss": 0.4462,
"step": 541
},
{
"epoch": 4.713819368879216,
"grad_norm": 1.390625,
"learning_rate": 2.647826086956522e-05,
"loss": 0.257,
"step": 542
},
{
"epoch": 4.722524483133841,
"grad_norm": 1.7421875,
"learning_rate": 2.643478260869565e-05,
"loss": 0.4824,
"step": 543
},
{
"epoch": 4.731229597388466,
"grad_norm": 1.359375,
"learning_rate": 2.639130434782609e-05,
"loss": 0.3473,
"step": 544
},
{
"epoch": 4.73993471164309,
"grad_norm": 1.6171875,
"learning_rate": 2.6347826086956524e-05,
"loss": 0.4204,
"step": 545
},
{
"epoch": 4.748639825897715,
"grad_norm": 1.6484375,
"learning_rate": 2.6304347826086954e-05,
"loss": 0.3148,
"step": 546
},
{
"epoch": 4.75734494015234,
"grad_norm": 1.4921875,
"learning_rate": 2.6260869565217394e-05,
"loss": 0.368,
"step": 547
},
{
"epoch": 4.766050054406964,
"grad_norm": 1.6015625,
"learning_rate": 2.6217391304347828e-05,
"loss": 0.3937,
"step": 548
},
{
"epoch": 4.7747551686615886,
"grad_norm": 1.4375,
"learning_rate": 2.617391304347826e-05,
"loss": 0.4529,
"step": 549
},
{
"epoch": 4.783460282916213,
"grad_norm": 1.2890625,
"learning_rate": 2.6130434782608698e-05,
"loss": 0.2479,
"step": 550
},
{
"epoch": 4.792165397170838,
"grad_norm": 1.5,
"learning_rate": 2.608695652173913e-05,
"loss": 0.348,
"step": 551
},
{
"epoch": 4.800870511425463,
"grad_norm": 1.390625,
"learning_rate": 2.6043478260869565e-05,
"loss": 0.2708,
"step": 552
},
{
"epoch": 4.809575625680087,
"grad_norm": 1.359375,
"learning_rate": 2.6000000000000002e-05,
"loss": 0.3292,
"step": 553
},
{
"epoch": 4.818280739934711,
"grad_norm": 1.3359375,
"learning_rate": 2.5956521739130435e-05,
"loss": 0.2596,
"step": 554
},
{
"epoch": 4.826985854189337,
"grad_norm": 1.4921875,
"learning_rate": 2.5913043478260872e-05,
"loss": 0.4254,
"step": 555
},
{
"epoch": 4.835690968443961,
"grad_norm": 1.1484375,
"learning_rate": 2.5869565217391305e-05,
"loss": 0.2204,
"step": 556
},
{
"epoch": 4.8443960826985855,
"grad_norm": 1.7109375,
"learning_rate": 2.582608695652174e-05,
"loss": 0.3869,
"step": 557
},
{
"epoch": 4.85310119695321,
"grad_norm": 1.796875,
"learning_rate": 2.5782608695652176e-05,
"loss": 0.3907,
"step": 558
},
{
"epoch": 4.861806311207834,
"grad_norm": 1.390625,
"learning_rate": 2.573913043478261e-05,
"loss": 0.2937,
"step": 559
},
{
"epoch": 4.8705114254624595,
"grad_norm": 1.796875,
"learning_rate": 2.5695652173913043e-05,
"loss": 0.4582,
"step": 560
},
{
"epoch": 4.879216539717084,
"grad_norm": 1.5625,
"learning_rate": 2.5652173913043483e-05,
"loss": 0.5466,
"step": 561
},
{
"epoch": 4.887921653971708,
"grad_norm": 1.7265625,
"learning_rate": 2.5608695652173913e-05,
"loss": 0.2614,
"step": 562
},
{
"epoch": 4.896626768226333,
"grad_norm": 1.5390625,
"learning_rate": 2.5565217391304346e-05,
"loss": 0.3748,
"step": 563
},
{
"epoch": 4.905331882480958,
"grad_norm": 1.7578125,
"learning_rate": 2.5521739130434786e-05,
"loss": 0.5146,
"step": 564
},
{
"epoch": 4.914036996735582,
"grad_norm": 1.5234375,
"learning_rate": 2.5478260869565217e-05,
"loss": 0.2761,
"step": 565
},
{
"epoch": 4.922742110990207,
"grad_norm": 1.265625,
"learning_rate": 2.543478260869565e-05,
"loss": 0.2113,
"step": 566
},
{
"epoch": 4.931447225244831,
"grad_norm": 1.4453125,
"learning_rate": 2.539130434782609e-05,
"loss": 0.2445,
"step": 567
},
{
"epoch": 4.9401523394994555,
"grad_norm": 1.3515625,
"learning_rate": 2.5347826086956524e-05,
"loss": 0.2419,
"step": 568
},
{
"epoch": 4.948857453754081,
"grad_norm": 1.3359375,
"learning_rate": 2.530434782608696e-05,
"loss": 0.205,
"step": 569
},
{
"epoch": 4.957562568008705,
"grad_norm": 1.5546875,
"learning_rate": 2.5260869565217394e-05,
"loss": 0.2996,
"step": 570
},
{
"epoch": 4.96626768226333,
"grad_norm": 1.3671875,
"learning_rate": 2.5217391304347827e-05,
"loss": 0.4251,
"step": 571
},
{
"epoch": 4.974972796517954,
"grad_norm": 1.5546875,
"learning_rate": 2.5173913043478264e-05,
"loss": 0.3702,
"step": 572
},
{
"epoch": 4.983677910772579,
"grad_norm": 2.015625,
"learning_rate": 2.5130434782608698e-05,
"loss": 0.3779,
"step": 573
},
{
"epoch": 4.992383025027204,
"grad_norm": 1.28125,
"learning_rate": 2.508695652173913e-05,
"loss": 0.4117,
"step": 574
},
{
"epoch": 5.0,
"grad_norm": 1.5234375,
"learning_rate": 2.5043478260869568e-05,
"loss": 0.3301,
"step": 575
},
{
"epoch": 5.008705114254624,
"grad_norm": 1.2578125,
"learning_rate": 2.5e-05,
"loss": 0.3133,
"step": 576
},
{
"epoch": 5.017410228509249,
"grad_norm": 1.3203125,
"learning_rate": 2.4956521739130438e-05,
"loss": 0.3616,
"step": 577
},
{
"epoch": 5.026115342763874,
"grad_norm": 1.375,
"learning_rate": 2.4913043478260868e-05,
"loss": 0.2147,
"step": 578
},
{
"epoch": 5.0348204570184985,
"grad_norm": 1.2734375,
"learning_rate": 2.4869565217391305e-05,
"loss": 0.169,
"step": 579
},
{
"epoch": 5.043525571273123,
"grad_norm": 1.6484375,
"learning_rate": 2.4826086956521742e-05,
"loss": 0.2304,
"step": 580
},
{
"epoch": 5.052230685527747,
"grad_norm": 1.5078125,
"learning_rate": 2.4782608695652175e-05,
"loss": 0.1854,
"step": 581
},
{
"epoch": 5.0609357997823725,
"grad_norm": 1.5703125,
"learning_rate": 2.473913043478261e-05,
"loss": 0.3501,
"step": 582
},
{
"epoch": 5.069640914036997,
"grad_norm": 1.671875,
"learning_rate": 2.4695652173913045e-05,
"loss": 0.2407,
"step": 583
},
{
"epoch": 5.078346028291621,
"grad_norm": 1.6953125,
"learning_rate": 2.465217391304348e-05,
"loss": 0.147,
"step": 584
},
{
"epoch": 5.087051142546246,
"grad_norm": 1.8828125,
"learning_rate": 2.4608695652173912e-05,
"loss": 0.193,
"step": 585
},
{
"epoch": 5.09575625680087,
"grad_norm": 1.765625,
"learning_rate": 2.456521739130435e-05,
"loss": 0.1877,
"step": 586
},
{
"epoch": 5.104461371055495,
"grad_norm": 1.8046875,
"learning_rate": 2.4521739130434786e-05,
"loss": 0.2971,
"step": 587
},
{
"epoch": 5.11316648531012,
"grad_norm": 1.453125,
"learning_rate": 2.447826086956522e-05,
"loss": 0.2993,
"step": 588
},
{
"epoch": 5.121871599564744,
"grad_norm": 1.515625,
"learning_rate": 2.4434782608695653e-05,
"loss": 0.163,
"step": 589
},
{
"epoch": 5.1305767138193685,
"grad_norm": 1.359375,
"learning_rate": 2.439130434782609e-05,
"loss": 0.2468,
"step": 590
},
{
"epoch": 5.139281828073994,
"grad_norm": 1.234375,
"learning_rate": 2.4347826086956523e-05,
"loss": 0.4714,
"step": 591
},
{
"epoch": 5.147986942328618,
"grad_norm": 1.2265625,
"learning_rate": 2.4304347826086956e-05,
"loss": 0.1436,
"step": 592
},
{
"epoch": 5.156692056583243,
"grad_norm": 1.171875,
"learning_rate": 2.4260869565217393e-05,
"loss": 0.1693,
"step": 593
},
{
"epoch": 5.165397170837867,
"grad_norm": 1.3984375,
"learning_rate": 2.4217391304347827e-05,
"loss": 0.2572,
"step": 594
},
{
"epoch": 5.174102285092492,
"grad_norm": 2.421875,
"learning_rate": 2.417391304347826e-05,
"loss": 0.2728,
"step": 595
},
{
"epoch": 5.182807399347117,
"grad_norm": 1.328125,
"learning_rate": 2.4130434782608697e-05,
"loss": 0.1908,
"step": 596
},
{
"epoch": 5.191512513601741,
"grad_norm": 1.7578125,
"learning_rate": 2.408695652173913e-05,
"loss": 0.2543,
"step": 597
},
{
"epoch": 5.200217627856365,
"grad_norm": 1.2890625,
"learning_rate": 2.4043478260869567e-05,
"loss": 0.1825,
"step": 598
},
{
"epoch": 5.20892274211099,
"grad_norm": 1.4921875,
"learning_rate": 2.4e-05,
"loss": 0.2256,
"step": 599
},
{
"epoch": 5.217627856365615,
"grad_norm": 1.375,
"learning_rate": 2.3956521739130437e-05,
"loss": 0.2383,
"step": 600
},
{
"epoch": 5.2263329706202395,
"grad_norm": 1.484375,
"learning_rate": 2.391304347826087e-05,
"loss": 0.2923,
"step": 601
},
{
"epoch": 5.235038084874864,
"grad_norm": 1.5078125,
"learning_rate": 2.3869565217391304e-05,
"loss": 0.3034,
"step": 602
},
{
"epoch": 5.243743199129488,
"grad_norm": 1.4296875,
"learning_rate": 2.382608695652174e-05,
"loss": 0.1643,
"step": 603
},
{
"epoch": 5.2524483133841136,
"grad_norm": 1.4453125,
"learning_rate": 2.3782608695652175e-05,
"loss": 0.2489,
"step": 604
},
{
"epoch": 5.261153427638738,
"grad_norm": 1.6640625,
"learning_rate": 2.373913043478261e-05,
"loss": 0.3017,
"step": 605
},
{
"epoch": 5.269858541893362,
"grad_norm": 1.3828125,
"learning_rate": 2.3695652173913045e-05,
"loss": 0.1751,
"step": 606
},
{
"epoch": 5.278563656147987,
"grad_norm": 1.421875,
"learning_rate": 2.3652173913043478e-05,
"loss": 0.1485,
"step": 607
},
{
"epoch": 5.287268770402612,
"grad_norm": 1.578125,
"learning_rate": 2.3608695652173915e-05,
"loss": 0.1679,
"step": 608
},
{
"epoch": 5.295973884657236,
"grad_norm": 1.6171875,
"learning_rate": 2.356521739130435e-05,
"loss": 0.162,
"step": 609
},
{
"epoch": 5.304678998911861,
"grad_norm": 1.3203125,
"learning_rate": 2.3521739130434782e-05,
"loss": 0.1554,
"step": 610
},
{
"epoch": 5.313384113166485,
"grad_norm": 1.3203125,
"learning_rate": 2.347826086956522e-05,
"loss": 0.2891,
"step": 611
},
{
"epoch": 5.32208922742111,
"grad_norm": 1.4765625,
"learning_rate": 2.3434782608695656e-05,
"loss": 0.3698,
"step": 612
},
{
"epoch": 5.330794341675735,
"grad_norm": 1.390625,
"learning_rate": 2.339130434782609e-05,
"loss": 0.1886,
"step": 613
},
{
"epoch": 5.339499455930359,
"grad_norm": 1.4609375,
"learning_rate": 2.3347826086956522e-05,
"loss": 0.1689,
"step": 614
},
{
"epoch": 5.348204570184984,
"grad_norm": 1.734375,
"learning_rate": 2.330434782608696e-05,
"loss": 0.2938,
"step": 615
},
{
"epoch": 5.356909684439608,
"grad_norm": 1.5703125,
"learning_rate": 2.3260869565217393e-05,
"loss": 0.2641,
"step": 616
},
{
"epoch": 5.365614798694233,
"grad_norm": 1.4140625,
"learning_rate": 2.3217391304347826e-05,
"loss": 0.1735,
"step": 617
},
{
"epoch": 5.374319912948858,
"grad_norm": 1.7421875,
"learning_rate": 2.3173913043478263e-05,
"loss": 0.3248,
"step": 618
},
{
"epoch": 5.383025027203482,
"grad_norm": 1.4453125,
"learning_rate": 2.31304347826087e-05,
"loss": 0.1592,
"step": 619
},
{
"epoch": 5.3917301414581065,
"grad_norm": 1.4765625,
"learning_rate": 2.308695652173913e-05,
"loss": 0.1805,
"step": 620
},
{
"epoch": 5.400435255712731,
"grad_norm": 1.53125,
"learning_rate": 2.3043478260869567e-05,
"loss": 0.2743,
"step": 621
},
{
"epoch": 5.409140369967356,
"grad_norm": 1.3203125,
"learning_rate": 2.3000000000000003e-05,
"loss": 0.2918,
"step": 622
},
{
"epoch": 5.4178454842219805,
"grad_norm": 2.28125,
"learning_rate": 2.2956521739130433e-05,
"loss": 0.3951,
"step": 623
},
{
"epoch": 5.426550598476605,
"grad_norm": 1.375,
"learning_rate": 2.291304347826087e-05,
"loss": 0.2319,
"step": 624
},
{
"epoch": 5.435255712731229,
"grad_norm": 1.4765625,
"learning_rate": 2.2869565217391307e-05,
"loss": 0.2565,
"step": 625
},
{
"epoch": 5.443960826985855,
"grad_norm": 1.609375,
"learning_rate": 2.282608695652174e-05,
"loss": 0.1877,
"step": 626
},
{
"epoch": 5.452665941240479,
"grad_norm": 1.3359375,
"learning_rate": 2.2782608695652174e-05,
"loss": 0.1451,
"step": 627
},
{
"epoch": 5.461371055495103,
"grad_norm": 1.53125,
"learning_rate": 2.273913043478261e-05,
"loss": 0.2011,
"step": 628
},
{
"epoch": 5.470076169749728,
"grad_norm": 1.40625,
"learning_rate": 2.2695652173913044e-05,
"loss": 0.1698,
"step": 629
},
{
"epoch": 5.478781284004352,
"grad_norm": 1.3984375,
"learning_rate": 2.2652173913043478e-05,
"loss": 0.1567,
"step": 630
},
{
"epoch": 5.487486398258977,
"grad_norm": 1.296875,
"learning_rate": 2.2608695652173914e-05,
"loss": 0.1489,
"step": 631
},
{
"epoch": 5.496191512513602,
"grad_norm": 1.3671875,
"learning_rate": 2.256521739130435e-05,
"loss": 0.1715,
"step": 632
},
{
"epoch": 5.504896626768226,
"grad_norm": 1.8984375,
"learning_rate": 2.252173913043478e-05,
"loss": 0.2442,
"step": 633
},
{
"epoch": 5.513601741022851,
"grad_norm": 1.7265625,
"learning_rate": 2.2478260869565218e-05,
"loss": 0.5451,
"step": 634
},
{
"epoch": 5.522306855277476,
"grad_norm": 1.3671875,
"learning_rate": 2.2434782608695655e-05,
"loss": 0.312,
"step": 635
},
{
"epoch": 5.5310119695321,
"grad_norm": 1.5703125,
"learning_rate": 2.239130434782609e-05,
"loss": 0.4152,
"step": 636
},
{
"epoch": 5.539717083786725,
"grad_norm": 1.375,
"learning_rate": 2.2347826086956522e-05,
"loss": 0.5363,
"step": 637
},
{
"epoch": 5.548422198041349,
"grad_norm": 1.3203125,
"learning_rate": 2.230434782608696e-05,
"loss": 0.1844,
"step": 638
},
{
"epoch": 5.5571273122959735,
"grad_norm": 1.9140625,
"learning_rate": 2.2260869565217392e-05,
"loss": 0.1978,
"step": 639
},
{
"epoch": 5.565832426550599,
"grad_norm": 1.703125,
"learning_rate": 2.2217391304347825e-05,
"loss": 0.2888,
"step": 640
},
{
"epoch": 5.574537540805223,
"grad_norm": 1.484375,
"learning_rate": 2.2173913043478262e-05,
"loss": 0.149,
"step": 641
},
{
"epoch": 5.5832426550598475,
"grad_norm": 1.53125,
"learning_rate": 2.2130434782608696e-05,
"loss": 0.3461,
"step": 642
},
{
"epoch": 5.591947769314472,
"grad_norm": 1.5390625,
"learning_rate": 2.2086956521739133e-05,
"loss": 0.4538,
"step": 643
},
{
"epoch": 5.600652883569097,
"grad_norm": 1.6171875,
"learning_rate": 2.2043478260869566e-05,
"loss": 0.1924,
"step": 644
},
{
"epoch": 5.609357997823722,
"grad_norm": 1.890625,
"learning_rate": 2.2000000000000003e-05,
"loss": 0.2456,
"step": 645
},
{
"epoch": 5.618063112078346,
"grad_norm": 1.3671875,
"learning_rate": 2.1956521739130436e-05,
"loss": 0.1566,
"step": 646
},
{
"epoch": 5.62676822633297,
"grad_norm": 1.390625,
"learning_rate": 2.191304347826087e-05,
"loss": 0.2101,
"step": 647
},
{
"epoch": 5.635473340587595,
"grad_norm": 1.421875,
"learning_rate": 2.1869565217391306e-05,
"loss": 0.266,
"step": 648
},
{
"epoch": 5.64417845484222,
"grad_norm": 1.7265625,
"learning_rate": 2.182608695652174e-05,
"loss": 0.4792,
"step": 649
},
{
"epoch": 5.652883569096844,
"grad_norm": 1.5234375,
"learning_rate": 2.1782608695652177e-05,
"loss": 0.1806,
"step": 650
},
{
"epoch": 5.661588683351469,
"grad_norm": 1.4609375,
"learning_rate": 2.173913043478261e-05,
"loss": 0.2714,
"step": 651
},
{
"epoch": 5.670293797606093,
"grad_norm": 1.6171875,
"learning_rate": 2.1695652173913044e-05,
"loss": 0.2635,
"step": 652
},
{
"epoch": 5.6789989118607185,
"grad_norm": 1.4453125,
"learning_rate": 2.165217391304348e-05,
"loss": 0.1836,
"step": 653
},
{
"epoch": 5.687704026115343,
"grad_norm": 1.4765625,
"learning_rate": 2.1608695652173914e-05,
"loss": 0.4014,
"step": 654
},
{
"epoch": 5.696409140369967,
"grad_norm": 1.171875,
"learning_rate": 2.1565217391304347e-05,
"loss": 0.1997,
"step": 655
},
{
"epoch": 5.705114254624592,
"grad_norm": 1.34375,
"learning_rate": 2.1521739130434784e-05,
"loss": 0.1798,
"step": 656
},
{
"epoch": 5.713819368879216,
"grad_norm": 1.4140625,
"learning_rate": 2.1478260869565218e-05,
"loss": 0.1653,
"step": 657
},
{
"epoch": 5.722524483133841,
"grad_norm": 1.4765625,
"learning_rate": 2.1434782608695654e-05,
"loss": 0.1735,
"step": 658
},
{
"epoch": 5.731229597388466,
"grad_norm": 1.4765625,
"learning_rate": 2.1391304347826088e-05,
"loss": 0.1909,
"step": 659
},
{
"epoch": 5.73993471164309,
"grad_norm": 1.359375,
"learning_rate": 2.1347826086956525e-05,
"loss": 0.239,
"step": 660
},
{
"epoch": 5.748639825897715,
"grad_norm": 1.3203125,
"learning_rate": 2.1304347826086958e-05,
"loss": 0.1403,
"step": 661
},
{
"epoch": 5.75734494015234,
"grad_norm": 1.234375,
"learning_rate": 2.126086956521739e-05,
"loss": 0.2705,
"step": 662
},
{
"epoch": 5.766050054406964,
"grad_norm": 1.3046875,
"learning_rate": 2.1217391304347828e-05,
"loss": 0.3939,
"step": 663
},
{
"epoch": 5.7747551686615886,
"grad_norm": 1.4296875,
"learning_rate": 2.1173913043478262e-05,
"loss": 0.2337,
"step": 664
},
{
"epoch": 5.783460282916213,
"grad_norm": 1.3828125,
"learning_rate": 2.1130434782608695e-05,
"loss": 0.1415,
"step": 665
},
{
"epoch": 5.792165397170838,
"grad_norm": 1.578125,
"learning_rate": 2.1086956521739132e-05,
"loss": 0.3502,
"step": 666
},
{
"epoch": 5.800870511425463,
"grad_norm": 1.4375,
"learning_rate": 2.104347826086957e-05,
"loss": 0.2582,
"step": 667
},
{
"epoch": 5.809575625680087,
"grad_norm": 1.5234375,
"learning_rate": 2.1e-05,
"loss": 0.2606,
"step": 668
},
{
"epoch": 5.818280739934711,
"grad_norm": 1.453125,
"learning_rate": 2.0956521739130436e-05,
"loss": 0.2181,
"step": 669
},
{
"epoch": 5.826985854189337,
"grad_norm": 1.6484375,
"learning_rate": 2.0913043478260872e-05,
"loss": 0.2898,
"step": 670
},
{
"epoch": 5.835690968443961,
"grad_norm": 1.5546875,
"learning_rate": 2.0869565217391303e-05,
"loss": 0.313,
"step": 671
},
{
"epoch": 5.8443960826985855,
"grad_norm": 1.5703125,
"learning_rate": 2.082608695652174e-05,
"loss": 0.2565,
"step": 672
},
{
"epoch": 5.85310119695321,
"grad_norm": 1.4609375,
"learning_rate": 2.0782608695652176e-05,
"loss": 0.1933,
"step": 673
},
{
"epoch": 5.861806311207834,
"grad_norm": 1.6328125,
"learning_rate": 2.073913043478261e-05,
"loss": 0.1794,
"step": 674
},
{
"epoch": 5.8705114254624595,
"grad_norm": 1.625,
"learning_rate": 2.0695652173913043e-05,
"loss": 0.3617,
"step": 675
},
{
"epoch": 5.879216539717084,
"grad_norm": 2.046875,
"learning_rate": 2.065217391304348e-05,
"loss": 0.258,
"step": 676
},
{
"epoch": 5.887921653971708,
"grad_norm": 1.28125,
"learning_rate": 2.0608695652173913e-05,
"loss": 0.2287,
"step": 677
},
{
"epoch": 5.896626768226333,
"grad_norm": 1.34375,
"learning_rate": 2.0565217391304347e-05,
"loss": 0.2913,
"step": 678
},
{
"epoch": 5.905331882480958,
"grad_norm": 1.796875,
"learning_rate": 2.0521739130434784e-05,
"loss": 0.4219,
"step": 679
},
{
"epoch": 5.914036996735582,
"grad_norm": 1.421875,
"learning_rate": 2.047826086956522e-05,
"loss": 0.2396,
"step": 680
},
{
"epoch": 5.922742110990207,
"grad_norm": 1.3046875,
"learning_rate": 2.0434782608695654e-05,
"loss": 0.152,
"step": 681
},
{
"epoch": 5.931447225244831,
"grad_norm": 1.4140625,
"learning_rate": 2.0391304347826087e-05,
"loss": 0.2087,
"step": 682
},
{
"epoch": 5.9401523394994555,
"grad_norm": 1.4453125,
"learning_rate": 2.0347826086956524e-05,
"loss": 0.1789,
"step": 683
},
{
"epoch": 5.948857453754081,
"grad_norm": 1.2421875,
"learning_rate": 2.0304347826086957e-05,
"loss": 0.1685,
"step": 684
},
{
"epoch": 5.957562568008705,
"grad_norm": 1.515625,
"learning_rate": 2.026086956521739e-05,
"loss": 0.2052,
"step": 685
},
{
"epoch": 5.96626768226333,
"grad_norm": 1.4921875,
"learning_rate": 2.0217391304347828e-05,
"loss": 0.1638,
"step": 686
},
{
"epoch": 5.974972796517954,
"grad_norm": 1.4375,
"learning_rate": 2.017391304347826e-05,
"loss": 0.1529,
"step": 687
},
{
"epoch": 5.983677910772579,
"grad_norm": 1.671875,
"learning_rate": 2.0130434782608695e-05,
"loss": 0.2103,
"step": 688
},
{
"epoch": 5.992383025027204,
"grad_norm": 1.4453125,
"learning_rate": 2.008695652173913e-05,
"loss": 0.2846,
"step": 689
},
{
"epoch": 6.0,
"grad_norm": 1.53125,
"learning_rate": 2.0043478260869565e-05,
"loss": 0.1696,
"step": 690
},
{
"epoch": 6.008705114254624,
"grad_norm": 1.1015625,
"learning_rate": 2e-05,
"loss": 0.1046,
"step": 691
},
{
"epoch": 6.017410228509249,
"grad_norm": 1.3359375,
"learning_rate": 1.9956521739130435e-05,
"loss": 0.13,
"step": 692
},
{
"epoch": 6.026115342763874,
"grad_norm": 1.1875,
"learning_rate": 1.9913043478260872e-05,
"loss": 0.1104,
"step": 693
},
{
"epoch": 6.0348204570184985,
"grad_norm": 1.4609375,
"learning_rate": 1.9869565217391305e-05,
"loss": 0.3832,
"step": 694
},
{
"epoch": 6.043525571273123,
"grad_norm": 1.2109375,
"learning_rate": 1.982608695652174e-05,
"loss": 0.1218,
"step": 695
},
{
"epoch": 6.052230685527747,
"grad_norm": 1.0625,
"learning_rate": 1.9782608695652176e-05,
"loss": 0.1026,
"step": 696
},
{
"epoch": 6.0609357997823725,
"grad_norm": 1.1875,
"learning_rate": 1.973913043478261e-05,
"loss": 0.1025,
"step": 697
},
{
"epoch": 6.069640914036997,
"grad_norm": 1.8515625,
"learning_rate": 1.9695652173913046e-05,
"loss": 0.2095,
"step": 698
},
{
"epoch": 6.078346028291621,
"grad_norm": 2.0625,
"learning_rate": 1.965217391304348e-05,
"loss": 0.3106,
"step": 699
},
{
"epoch": 6.087051142546246,
"grad_norm": 2.546875,
"learning_rate": 1.9608695652173913e-05,
"loss": 0.3309,
"step": 700
},
{
"epoch": 6.09575625680087,
"grad_norm": 2.015625,
"learning_rate": 1.956521739130435e-05,
"loss": 0.1225,
"step": 701
},
{
"epoch": 6.104461371055495,
"grad_norm": 1.9609375,
"learning_rate": 1.9521739130434783e-05,
"loss": 0.1258,
"step": 702
},
{
"epoch": 6.11316648531012,
"grad_norm": 1.9296875,
"learning_rate": 1.9478260869565216e-05,
"loss": 0.1622,
"step": 703
},
{
"epoch": 6.121871599564744,
"grad_norm": 1.4140625,
"learning_rate": 1.9434782608695653e-05,
"loss": 0.2628,
"step": 704
},
{
"epoch": 6.1305767138193685,
"grad_norm": 1.4375,
"learning_rate": 1.939130434782609e-05,
"loss": 0.3527,
"step": 705
},
{
"epoch": 6.139281828073994,
"grad_norm": 1.4453125,
"learning_rate": 1.9347826086956523e-05,
"loss": 0.1771,
"step": 706
},
{
"epoch": 6.147986942328618,
"grad_norm": 1.2109375,
"learning_rate": 1.9304347826086957e-05,
"loss": 0.1159,
"step": 707
},
{
"epoch": 6.156692056583243,
"grad_norm": 1.4453125,
"learning_rate": 1.9260869565217394e-05,
"loss": 0.1815,
"step": 708
},
{
"epoch": 6.165397170837867,
"grad_norm": 1.7578125,
"learning_rate": 1.9217391304347827e-05,
"loss": 0.1239,
"step": 709
},
{
"epoch": 6.174102285092492,
"grad_norm": 1.1484375,
"learning_rate": 1.917391304347826e-05,
"loss": 0.1047,
"step": 710
},
{
"epoch": 6.182807399347117,
"grad_norm": 1.2890625,
"learning_rate": 1.9130434782608697e-05,
"loss": 0.2231,
"step": 711
},
{
"epoch": 6.191512513601741,
"grad_norm": 1.1796875,
"learning_rate": 1.9086956521739134e-05,
"loss": 0.2026,
"step": 712
},
{
"epoch": 6.200217627856365,
"grad_norm": 1.03125,
"learning_rate": 1.9043478260869564e-05,
"loss": 0.1102,
"step": 713
},
{
"epoch": 6.20892274211099,
"grad_norm": 1.109375,
"learning_rate": 1.9e-05,
"loss": 0.2475,
"step": 714
},
{
"epoch": 6.217627856365615,
"grad_norm": 1.296875,
"learning_rate": 1.8956521739130438e-05,
"loss": 0.1131,
"step": 715
},
{
"epoch": 6.2263329706202395,
"grad_norm": 1.234375,
"learning_rate": 1.8913043478260868e-05,
"loss": 0.1226,
"step": 716
},
{
"epoch": 6.235038084874864,
"grad_norm": 1.4765625,
"learning_rate": 1.8869565217391305e-05,
"loss": 0.2253,
"step": 717
},
{
"epoch": 6.243743199129488,
"grad_norm": 1.46875,
"learning_rate": 1.882608695652174e-05,
"loss": 0.1235,
"step": 718
},
{
"epoch": 6.2524483133841136,
"grad_norm": 1.34375,
"learning_rate": 1.8782608695652175e-05,
"loss": 0.111,
"step": 719
},
{
"epoch": 6.261153427638738,
"grad_norm": 1.4296875,
"learning_rate": 1.873913043478261e-05,
"loss": 0.1061,
"step": 720
},
{
"epoch": 6.269858541893362,
"grad_norm": 1.421875,
"learning_rate": 1.8695652173913045e-05,
"loss": 0.2592,
"step": 721
},
{
"epoch": 6.278563656147987,
"grad_norm": 1.4140625,
"learning_rate": 1.865217391304348e-05,
"loss": 0.3231,
"step": 722
},
{
"epoch": 6.287268770402612,
"grad_norm": 1.1796875,
"learning_rate": 1.8608695652173912e-05,
"loss": 0.1606,
"step": 723
},
{
"epoch": 6.295973884657236,
"grad_norm": 1.2265625,
"learning_rate": 1.856521739130435e-05,
"loss": 0.1071,
"step": 724
},
{
"epoch": 6.304678998911861,
"grad_norm": 1.3828125,
"learning_rate": 1.8521739130434786e-05,
"loss": 0.1893,
"step": 725
},
{
"epoch": 6.313384113166485,
"grad_norm": 1.1953125,
"learning_rate": 1.8478260869565216e-05,
"loss": 0.1281,
"step": 726
},
{
"epoch": 6.32208922742111,
"grad_norm": 1.6171875,
"learning_rate": 1.8434782608695653e-05,
"loss": 0.4475,
"step": 727
},
{
"epoch": 6.330794341675735,
"grad_norm": 1.5,
"learning_rate": 1.839130434782609e-05,
"loss": 0.2581,
"step": 728
},
{
"epoch": 6.339499455930359,
"grad_norm": 1.171875,
"learning_rate": 1.8347826086956523e-05,
"loss": 0.2401,
"step": 729
},
{
"epoch": 6.348204570184984,
"grad_norm": 1.34375,
"learning_rate": 1.8304347826086956e-05,
"loss": 0.2701,
"step": 730
},
{
"epoch": 6.356909684439608,
"grad_norm": 1.578125,
"learning_rate": 1.8260869565217393e-05,
"loss": 0.2357,
"step": 731
},
{
"epoch": 6.365614798694233,
"grad_norm": 1.4140625,
"learning_rate": 1.8217391304347827e-05,
"loss": 0.2872,
"step": 732
},
{
"epoch": 6.374319912948858,
"grad_norm": 1.2578125,
"learning_rate": 1.817391304347826e-05,
"loss": 0.2282,
"step": 733
},
{
"epoch": 6.383025027203482,
"grad_norm": 1.3359375,
"learning_rate": 1.8130434782608697e-05,
"loss": 0.1119,
"step": 734
},
{
"epoch": 6.3917301414581065,
"grad_norm": 1.328125,
"learning_rate": 1.808695652173913e-05,
"loss": 0.1212,
"step": 735
},
{
"epoch": 6.400435255712731,
"grad_norm": 1.4609375,
"learning_rate": 1.8043478260869567e-05,
"loss": 0.1622,
"step": 736
},
{
"epoch": 6.409140369967356,
"grad_norm": 1.34375,
"learning_rate": 1.8e-05,
"loss": 0.1371,
"step": 737
},
{
"epoch": 6.4178454842219805,
"grad_norm": 1.25,
"learning_rate": 1.7956521739130437e-05,
"loss": 0.3106,
"step": 738
},
{
"epoch": 6.426550598476605,
"grad_norm": 1.171875,
"learning_rate": 1.791304347826087e-05,
"loss": 0.158,
"step": 739
},
{
"epoch": 6.435255712731229,
"grad_norm": 1.21875,
"learning_rate": 1.7869565217391304e-05,
"loss": 0.1296,
"step": 740
},
{
"epoch": 6.443960826985855,
"grad_norm": 1.2265625,
"learning_rate": 1.782608695652174e-05,
"loss": 0.1785,
"step": 741
},
{
"epoch": 6.452665941240479,
"grad_norm": 1.46875,
"learning_rate": 1.7782608695652174e-05,
"loss": 0.1187,
"step": 742
},
{
"epoch": 6.461371055495103,
"grad_norm": 1.796875,
"learning_rate": 1.7739130434782608e-05,
"loss": 0.1874,
"step": 743
},
{
"epoch": 6.470076169749728,
"grad_norm": 0.9921875,
"learning_rate": 1.7695652173913045e-05,
"loss": 0.0773,
"step": 744
},
{
"epoch": 6.478781284004352,
"grad_norm": 1.0390625,
"learning_rate": 1.7652173913043478e-05,
"loss": 0.1084,
"step": 745
},
{
"epoch": 6.487486398258977,
"grad_norm": 1.4375,
"learning_rate": 1.7608695652173915e-05,
"loss": 0.2976,
"step": 746
},
{
"epoch": 6.496191512513602,
"grad_norm": 1.375,
"learning_rate": 1.756521739130435e-05,
"loss": 0.1624,
"step": 747
},
{
"epoch": 6.504896626768226,
"grad_norm": 1.1953125,
"learning_rate": 1.7521739130434782e-05,
"loss": 0.1115,
"step": 748
},
{
"epoch": 6.513601741022851,
"grad_norm": 1.2109375,
"learning_rate": 1.747826086956522e-05,
"loss": 0.1687,
"step": 749
},
{
"epoch": 6.522306855277476,
"grad_norm": 1.3203125,
"learning_rate": 1.7434782608695652e-05,
"loss": 0.0972,
"step": 750
},
{
"epoch": 6.5310119695321,
"grad_norm": 1.21875,
"learning_rate": 1.739130434782609e-05,
"loss": 0.4203,
"step": 751
},
{
"epoch": 6.539717083786725,
"grad_norm": 1.4765625,
"learning_rate": 1.7347826086956522e-05,
"loss": 0.1758,
"step": 752
},
{
"epoch": 6.548422198041349,
"grad_norm": 1.2578125,
"learning_rate": 1.730434782608696e-05,
"loss": 0.1192,
"step": 753
},
{
"epoch": 6.5571273122959735,
"grad_norm": 1.265625,
"learning_rate": 1.7260869565217393e-05,
"loss": 0.2223,
"step": 754
},
{
"epoch": 6.565832426550599,
"grad_norm": 1.53125,
"learning_rate": 1.7217391304347826e-05,
"loss": 0.1212,
"step": 755
},
{
"epoch": 6.574537540805223,
"grad_norm": 1.3046875,
"learning_rate": 1.7173913043478263e-05,
"loss": 0.1626,
"step": 756
},
{
"epoch": 6.5832426550598475,
"grad_norm": 1.3046875,
"learning_rate": 1.7130434782608696e-05,
"loss": 0.1186,
"step": 757
},
{
"epoch": 6.591947769314472,
"grad_norm": 1.625,
"learning_rate": 1.708695652173913e-05,
"loss": 0.1474,
"step": 758
},
{
"epoch": 6.600652883569097,
"grad_norm": 1.2734375,
"learning_rate": 1.7043478260869566e-05,
"loss": 0.2377,
"step": 759
},
{
"epoch": 6.609357997823722,
"grad_norm": 1.421875,
"learning_rate": 1.7000000000000003e-05,
"loss": 0.121,
"step": 760
},
{
"epoch": 6.618063112078346,
"grad_norm": 1.4609375,
"learning_rate": 1.6956521739130433e-05,
"loss": 0.1173,
"step": 761
},
{
"epoch": 6.62676822633297,
"grad_norm": 1.4453125,
"learning_rate": 1.691304347826087e-05,
"loss": 0.1249,
"step": 762
},
{
"epoch": 6.635473340587595,
"grad_norm": 1.3828125,
"learning_rate": 1.6869565217391307e-05,
"loss": 0.4696,
"step": 763
},
{
"epoch": 6.64417845484222,
"grad_norm": 1.46875,
"learning_rate": 1.682608695652174e-05,
"loss": 0.2748,
"step": 764
},
{
"epoch": 6.652883569096844,
"grad_norm": 1.421875,
"learning_rate": 1.6782608695652174e-05,
"loss": 0.2446,
"step": 765
},
{
"epoch": 6.661588683351469,
"grad_norm": 1.2421875,
"learning_rate": 1.673913043478261e-05,
"loss": 0.2472,
"step": 766
},
{
"epoch": 6.670293797606093,
"grad_norm": 1.921875,
"learning_rate": 1.6695652173913044e-05,
"loss": 0.112,
"step": 767
},
{
"epoch": 6.6789989118607185,
"grad_norm": 2.015625,
"learning_rate": 1.6652173913043477e-05,
"loss": 0.2588,
"step": 768
},
{
"epoch": 6.687704026115343,
"grad_norm": 1.3671875,
"learning_rate": 1.6608695652173914e-05,
"loss": 0.2644,
"step": 769
},
{
"epoch": 6.696409140369967,
"grad_norm": 1.640625,
"learning_rate": 1.656521739130435e-05,
"loss": 0.2227,
"step": 770
},
{
"epoch": 6.705114254624592,
"grad_norm": 1.390625,
"learning_rate": 1.652173913043478e-05,
"loss": 0.137,
"step": 771
},
{
"epoch": 6.713819368879216,
"grad_norm": 1.3046875,
"learning_rate": 1.6478260869565218e-05,
"loss": 0.1273,
"step": 772
},
{
"epoch": 6.722524483133841,
"grad_norm": 1.1015625,
"learning_rate": 1.6434782608695655e-05,
"loss": 0.1055,
"step": 773
},
{
"epoch": 6.731229597388466,
"grad_norm": 1.6640625,
"learning_rate": 1.6391304347826085e-05,
"loss": 0.3052,
"step": 774
},
{
"epoch": 6.73993471164309,
"grad_norm": 1.609375,
"learning_rate": 1.634782608695652e-05,
"loss": 0.2773,
"step": 775
},
{
"epoch": 6.748639825897715,
"grad_norm": 1.3984375,
"learning_rate": 1.630434782608696e-05,
"loss": 0.2021,
"step": 776
},
{
"epoch": 6.75734494015234,
"grad_norm": 1.3203125,
"learning_rate": 1.6260869565217392e-05,
"loss": 0.158,
"step": 777
},
{
"epoch": 6.766050054406964,
"grad_norm": 1.3125,
"learning_rate": 1.6217391304347825e-05,
"loss": 0.2299,
"step": 778
},
{
"epoch": 6.7747551686615886,
"grad_norm": 1.703125,
"learning_rate": 1.6173913043478262e-05,
"loss": 0.1127,
"step": 779
},
{
"epoch": 6.783460282916213,
"grad_norm": 1.375,
"learning_rate": 1.6130434782608696e-05,
"loss": 0.3321,
"step": 780
},
{
"epoch": 6.792165397170838,
"grad_norm": 1.1953125,
"learning_rate": 1.608695652173913e-05,
"loss": 0.2294,
"step": 781
},
{
"epoch": 6.800870511425463,
"grad_norm": 2.0,
"learning_rate": 1.6043478260869566e-05,
"loss": 0.2099,
"step": 782
},
{
"epoch": 6.809575625680087,
"grad_norm": 1.3359375,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.1154,
"step": 783
},
{
"epoch": 6.818280739934711,
"grad_norm": 1.34375,
"learning_rate": 1.5956521739130436e-05,
"loss": 0.2671,
"step": 784
},
{
"epoch": 6.826985854189337,
"grad_norm": 1.4453125,
"learning_rate": 1.591304347826087e-05,
"loss": 0.4304,
"step": 785
},
{
"epoch": 6.835690968443961,
"grad_norm": 1.9375,
"learning_rate": 1.5869565217391306e-05,
"loss": 0.2066,
"step": 786
},
{
"epoch": 6.8443960826985855,
"grad_norm": 1.3671875,
"learning_rate": 1.582608695652174e-05,
"loss": 0.1951,
"step": 787
},
{
"epoch": 6.85310119695321,
"grad_norm": 1.421875,
"learning_rate": 1.5782608695652173e-05,
"loss": 0.1178,
"step": 788
},
{
"epoch": 6.861806311207834,
"grad_norm": 1.609375,
"learning_rate": 1.573913043478261e-05,
"loss": 0.1175,
"step": 789
},
{
"epoch": 6.8705114254624595,
"grad_norm": 1.65625,
"learning_rate": 1.5695652173913043e-05,
"loss": 0.1742,
"step": 790
},
{
"epoch": 6.879216539717084,
"grad_norm": 1.3359375,
"learning_rate": 1.565217391304348e-05,
"loss": 0.1665,
"step": 791
},
{
"epoch": 6.887921653971708,
"grad_norm": 1.546875,
"learning_rate": 1.5608695652173914e-05,
"loss": 0.107,
"step": 792
},
{
"epoch": 6.896626768226333,
"grad_norm": 1.1328125,
"learning_rate": 1.5565217391304347e-05,
"loss": 0.124,
"step": 793
},
{
"epoch": 6.905331882480958,
"grad_norm": 1.1875,
"learning_rate": 1.5521739130434784e-05,
"loss": 0.1306,
"step": 794
},
{
"epoch": 6.914036996735582,
"grad_norm": 1.640625,
"learning_rate": 1.5478260869565217e-05,
"loss": 0.2717,
"step": 795
},
{
"epoch": 6.922742110990207,
"grad_norm": 1.5078125,
"learning_rate": 1.5434782608695654e-05,
"loss": 0.3487,
"step": 796
},
{
"epoch": 6.931447225244831,
"grad_norm": 2.53125,
"learning_rate": 1.5391304347826088e-05,
"loss": 0.241,
"step": 797
},
{
"epoch": 6.9401523394994555,
"grad_norm": 1.171875,
"learning_rate": 1.5347826086956524e-05,
"loss": 0.0895,
"step": 798
},
{
"epoch": 6.948857453754081,
"grad_norm": 1.25,
"learning_rate": 1.5304347826086958e-05,
"loss": 0.131,
"step": 799
},
{
"epoch": 6.957562568008705,
"grad_norm": 1.5078125,
"learning_rate": 1.526086956521739e-05,
"loss": 0.1453,
"step": 800
},
{
"epoch": 6.96626768226333,
"grad_norm": 1.3671875,
"learning_rate": 1.5217391304347828e-05,
"loss": 0.1369,
"step": 801
},
{
"epoch": 6.974972796517954,
"grad_norm": 1.0234375,
"learning_rate": 1.517391304347826e-05,
"loss": 0.3932,
"step": 802
},
{
"epoch": 6.983677910772579,
"grad_norm": 1.3984375,
"learning_rate": 1.5130434782608697e-05,
"loss": 0.1276,
"step": 803
},
{
"epoch": 6.992383025027204,
"grad_norm": 1.40625,
"learning_rate": 1.5086956521739132e-05,
"loss": 0.1897,
"step": 804
},
{
"epoch": 7.0,
"grad_norm": 1.71875,
"learning_rate": 1.5043478260869565e-05,
"loss": 0.211,
"step": 805
},
{
"epoch": 7.008705114254624,
"grad_norm": 1.1015625,
"learning_rate": 1.5e-05,
"loss": 0.2396,
"step": 806
},
{
"epoch": 7.017410228509249,
"grad_norm": 1.1015625,
"learning_rate": 1.4956521739130436e-05,
"loss": 0.1055,
"step": 807
},
{
"epoch": 7.026115342763874,
"grad_norm": 0.90234375,
"learning_rate": 1.491304347826087e-05,
"loss": 0.0925,
"step": 808
},
{
"epoch": 7.0348204570184985,
"grad_norm": 0.89453125,
"learning_rate": 1.4869565217391304e-05,
"loss": 0.0873,
"step": 809
},
{
"epoch": 7.043525571273123,
"grad_norm": 1.421875,
"learning_rate": 1.482608695652174e-05,
"loss": 0.1059,
"step": 810
},
{
"epoch": 7.052230685527747,
"grad_norm": 1.0234375,
"learning_rate": 1.4782608695652176e-05,
"loss": 0.2531,
"step": 811
},
{
"epoch": 7.0609357997823725,
"grad_norm": 8.5,
"learning_rate": 1.4739130434782608e-05,
"loss": 0.308,
"step": 812
},
{
"epoch": 7.069640914036997,
"grad_norm": 2.0,
"learning_rate": 1.4695652173913045e-05,
"loss": 0.2168,
"step": 813
},
{
"epoch": 7.078346028291621,
"grad_norm": 1.578125,
"learning_rate": 1.465217391304348e-05,
"loss": 0.105,
"step": 814
},
{
"epoch": 7.087051142546246,
"grad_norm": 2.1875,
"learning_rate": 1.4608695652173915e-05,
"loss": 0.169,
"step": 815
},
{
"epoch": 7.09575625680087,
"grad_norm": 1.09375,
"learning_rate": 1.4565217391304348e-05,
"loss": 0.3773,
"step": 816
},
{
"epoch": 7.104461371055495,
"grad_norm": 1.2734375,
"learning_rate": 1.4521739130434783e-05,
"loss": 0.0821,
"step": 817
},
{
"epoch": 7.11316648531012,
"grad_norm": 1.5390625,
"learning_rate": 1.4478260869565218e-05,
"loss": 0.2362,
"step": 818
},
{
"epoch": 7.121871599564744,
"grad_norm": 1.25,
"learning_rate": 1.4434782608695652e-05,
"loss": 0.0891,
"step": 819
},
{
"epoch": 7.1305767138193685,
"grad_norm": 1.21875,
"learning_rate": 1.4391304347826087e-05,
"loss": 0.1366,
"step": 820
},
{
"epoch": 7.139281828073994,
"grad_norm": 1.3203125,
"learning_rate": 1.4347826086956522e-05,
"loss": 0.1512,
"step": 821
},
{
"epoch": 7.147986942328618,
"grad_norm": 1.2421875,
"learning_rate": 1.4304347826086959e-05,
"loss": 0.0816,
"step": 822
},
{
"epoch": 7.156692056583243,
"grad_norm": 2.484375,
"learning_rate": 1.426086956521739e-05,
"loss": 0.3353,
"step": 823
},
{
"epoch": 7.165397170837867,
"grad_norm": 1.0546875,
"learning_rate": 1.4217391304347828e-05,
"loss": 0.0691,
"step": 824
},
{
"epoch": 7.174102285092492,
"grad_norm": 1.421875,
"learning_rate": 1.4173913043478263e-05,
"loss": 0.4129,
"step": 825
},
{
"epoch": 7.182807399347117,
"grad_norm": 1.2734375,
"learning_rate": 1.4130434782608694e-05,
"loss": 0.2095,
"step": 826
},
{
"epoch": 7.191512513601741,
"grad_norm": 1.0234375,
"learning_rate": 1.4086956521739131e-05,
"loss": 0.221,
"step": 827
},
{
"epoch": 7.200217627856365,
"grad_norm": 1.1171875,
"learning_rate": 1.4043478260869566e-05,
"loss": 0.1166,
"step": 828
},
{
"epoch": 7.20892274211099,
"grad_norm": 1.0625,
"learning_rate": 1.4000000000000001e-05,
"loss": 0.1862,
"step": 829
},
{
"epoch": 7.217627856365615,
"grad_norm": 1.9453125,
"learning_rate": 1.3956521739130435e-05,
"loss": 0.3947,
"step": 830
},
{
"epoch": 7.2263329706202395,
"grad_norm": 1.5625,
"learning_rate": 1.391304347826087e-05,
"loss": 0.1411,
"step": 831
},
{
"epoch": 7.235038084874864,
"grad_norm": 1.6640625,
"learning_rate": 1.3869565217391305e-05,
"loss": 0.1547,
"step": 832
},
{
"epoch": 7.243743199129488,
"grad_norm": 1.203125,
"learning_rate": 1.3826086956521739e-05,
"loss": 0.1125,
"step": 833
},
{
"epoch": 7.2524483133841136,
"grad_norm": 1.078125,
"learning_rate": 1.3782608695652174e-05,
"loss": 0.0753,
"step": 834
},
{
"epoch": 7.261153427638738,
"grad_norm": 0.90625,
"learning_rate": 1.373913043478261e-05,
"loss": 0.0893,
"step": 835
},
{
"epoch": 7.269858541893362,
"grad_norm": 1.1796875,
"learning_rate": 1.3695652173913042e-05,
"loss": 0.09,
"step": 836
},
{
"epoch": 7.278563656147987,
"grad_norm": 1.109375,
"learning_rate": 1.3652173913043479e-05,
"loss": 0.0822,
"step": 837
},
{
"epoch": 7.287268770402612,
"grad_norm": 1.1484375,
"learning_rate": 1.3608695652173914e-05,
"loss": 0.1241,
"step": 838
},
{
"epoch": 7.295973884657236,
"grad_norm": 2.140625,
"learning_rate": 1.356521739130435e-05,
"loss": 0.2148,
"step": 839
},
{
"epoch": 7.304678998911861,
"grad_norm": 1.6015625,
"learning_rate": 1.3521739130434783e-05,
"loss": 0.2818,
"step": 840
},
{
"epoch": 7.313384113166485,
"grad_norm": 1.1484375,
"learning_rate": 1.3478260869565218e-05,
"loss": 0.196,
"step": 841
},
{
"epoch": 7.32208922742111,
"grad_norm": 1.828125,
"learning_rate": 1.3434782608695653e-05,
"loss": 0.2302,
"step": 842
},
{
"epoch": 7.330794341675735,
"grad_norm": 1.078125,
"learning_rate": 1.3391304347826086e-05,
"loss": 0.0971,
"step": 843
},
{
"epoch": 7.339499455930359,
"grad_norm": 2.328125,
"learning_rate": 1.3347826086956522e-05,
"loss": 0.2019,
"step": 844
},
{
"epoch": 7.348204570184984,
"grad_norm": 0.83984375,
"learning_rate": 1.3304347826086957e-05,
"loss": 0.0639,
"step": 845
},
{
"epoch": 7.356909684439608,
"grad_norm": 1.4296875,
"learning_rate": 1.3260869565217394e-05,
"loss": 0.1549,
"step": 846
},
{
"epoch": 7.365614798694233,
"grad_norm": 0.98828125,
"learning_rate": 1.3217391304347825e-05,
"loss": 0.0979,
"step": 847
},
{
"epoch": 7.374319912948858,
"grad_norm": 1.0546875,
"learning_rate": 1.3173913043478262e-05,
"loss": 0.0798,
"step": 848
},
{
"epoch": 7.383025027203482,
"grad_norm": 1.2578125,
"learning_rate": 1.3130434782608697e-05,
"loss": 0.0823,
"step": 849
},
{
"epoch": 7.3917301414581065,
"grad_norm": 1.015625,
"learning_rate": 1.308695652173913e-05,
"loss": 0.1346,
"step": 850
},
{
"epoch": 7.400435255712731,
"grad_norm": 1.046875,
"learning_rate": 1.3043478260869566e-05,
"loss": 0.199,
"step": 851
},
{
"epoch": 7.409140369967356,
"grad_norm": 1.1953125,
"learning_rate": 1.3000000000000001e-05,
"loss": 0.0879,
"step": 852
},
{
"epoch": 7.4178454842219805,
"grad_norm": 1.109375,
"learning_rate": 1.2956521739130436e-05,
"loss": 0.0789,
"step": 853
},
{
"epoch": 7.426550598476605,
"grad_norm": 1.2734375,
"learning_rate": 1.291304347826087e-05,
"loss": 0.1325,
"step": 854
},
{
"epoch": 7.435255712731229,
"grad_norm": 1.1796875,
"learning_rate": 1.2869565217391305e-05,
"loss": 0.2292,
"step": 855
},
{
"epoch": 7.443960826985855,
"grad_norm": 1.078125,
"learning_rate": 1.2826086956521741e-05,
"loss": 0.0977,
"step": 856
},
{
"epoch": 7.452665941240479,
"grad_norm": 1.1328125,
"learning_rate": 1.2782608695652173e-05,
"loss": 0.1695,
"step": 857
},
{
"epoch": 7.461371055495103,
"grad_norm": 1.125,
"learning_rate": 1.2739130434782608e-05,
"loss": 0.1297,
"step": 858
},
{
"epoch": 7.470076169749728,
"grad_norm": 2.21875,
"learning_rate": 1.2695652173913045e-05,
"loss": 0.336,
"step": 859
},
{
"epoch": 7.478781284004352,
"grad_norm": 1.203125,
"learning_rate": 1.265217391304348e-05,
"loss": 0.2094,
"step": 860
},
{
"epoch": 7.487486398258977,
"grad_norm": 1.1875,
"learning_rate": 1.2608695652173914e-05,
"loss": 0.09,
"step": 861
},
{
"epoch": 7.496191512513602,
"grad_norm": 1.296875,
"learning_rate": 1.2565217391304349e-05,
"loss": 0.3408,
"step": 862
},
{
"epoch": 7.504896626768226,
"grad_norm": 1.25,
"learning_rate": 1.2521739130434784e-05,
"loss": 0.2075,
"step": 863
},
{
"epoch": 7.513601741022851,
"grad_norm": 1.890625,
"learning_rate": 1.2478260869565219e-05,
"loss": 0.2772,
"step": 864
},
{
"epoch": 7.522306855277476,
"grad_norm": 1.0859375,
"learning_rate": 1.2434782608695652e-05,
"loss": 0.1936,
"step": 865
},
{
"epoch": 7.5310119695321,
"grad_norm": 1.140625,
"learning_rate": 1.2391304347826088e-05,
"loss": 0.1781,
"step": 866
},
{
"epoch": 7.539717083786725,
"grad_norm": 1.7734375,
"learning_rate": 1.2347826086956523e-05,
"loss": 0.3087,
"step": 867
},
{
"epoch": 7.548422198041349,
"grad_norm": 1.21875,
"learning_rate": 1.2304347826086956e-05,
"loss": 0.0969,
"step": 868
},
{
"epoch": 7.5571273122959735,
"grad_norm": 1.2265625,
"learning_rate": 1.2260869565217393e-05,
"loss": 0.0817,
"step": 869
},
{
"epoch": 7.565832426550599,
"grad_norm": 1.1171875,
"learning_rate": 1.2217391304347826e-05,
"loss": 0.0882,
"step": 870
},
{
"epoch": 7.574537540805223,
"grad_norm": 1.265625,
"learning_rate": 1.2173913043478261e-05,
"loss": 0.1011,
"step": 871
},
{
"epoch": 7.5832426550598475,
"grad_norm": 1.3125,
"learning_rate": 1.2130434782608697e-05,
"loss": 0.1108,
"step": 872
},
{
"epoch": 7.591947769314472,
"grad_norm": 1.328125,
"learning_rate": 1.208695652173913e-05,
"loss": 0.1778,
"step": 873
},
{
"epoch": 7.600652883569097,
"grad_norm": 1.171875,
"learning_rate": 1.2043478260869565e-05,
"loss": 0.0919,
"step": 874
},
{
"epoch": 7.609357997823722,
"grad_norm": 1.1328125,
"learning_rate": 1.2e-05,
"loss": 0.0926,
"step": 875
},
{
"epoch": 7.618063112078346,
"grad_norm": 1.0859375,
"learning_rate": 1.1956521739130435e-05,
"loss": 0.1102,
"step": 876
},
{
"epoch": 7.62676822633297,
"grad_norm": 1.1328125,
"learning_rate": 1.191304347826087e-05,
"loss": 0.139,
"step": 877
},
{
"epoch": 7.635473340587595,
"grad_norm": 1.0703125,
"learning_rate": 1.1869565217391306e-05,
"loss": 0.074,
"step": 878
},
{
"epoch": 7.64417845484222,
"grad_norm": 1.296875,
"learning_rate": 1.1826086956521739e-05,
"loss": 0.102,
"step": 879
},
{
"epoch": 7.652883569096844,
"grad_norm": 1.0703125,
"learning_rate": 1.1782608695652174e-05,
"loss": 0.0972,
"step": 880
},
{
"epoch": 7.661588683351469,
"grad_norm": 1.5625,
"learning_rate": 1.173913043478261e-05,
"loss": 0.4786,
"step": 881
},
{
"epoch": 7.670293797606093,
"grad_norm": 1.4375,
"learning_rate": 1.1695652173913044e-05,
"loss": 0.3028,
"step": 882
},
{
"epoch": 7.6789989118607185,
"grad_norm": 1.21875,
"learning_rate": 1.165217391304348e-05,
"loss": 0.1265,
"step": 883
},
{
"epoch": 7.687704026115343,
"grad_norm": 1.6875,
"learning_rate": 1.1608695652173913e-05,
"loss": 0.1341,
"step": 884
},
{
"epoch": 7.696409140369967,
"grad_norm": 1.59375,
"learning_rate": 1.156521739130435e-05,
"loss": 0.0997,
"step": 885
},
{
"epoch": 7.705114254624592,
"grad_norm": 1.5703125,
"learning_rate": 1.1521739130434783e-05,
"loss": 0.1505,
"step": 886
},
{
"epoch": 7.713819368879216,
"grad_norm": 1.203125,
"learning_rate": 1.1478260869565217e-05,
"loss": 0.0841,
"step": 887
},
{
"epoch": 7.722524483133841,
"grad_norm": 1.328125,
"learning_rate": 1.1434782608695654e-05,
"loss": 0.1177,
"step": 888
},
{
"epoch": 7.731229597388466,
"grad_norm": 1.296875,
"learning_rate": 1.1391304347826087e-05,
"loss": 0.0964,
"step": 889
},
{
"epoch": 7.73993471164309,
"grad_norm": 1.15625,
"learning_rate": 1.1347826086956522e-05,
"loss": 0.208,
"step": 890
},
{
"epoch": 7.748639825897715,
"grad_norm": 1.296875,
"learning_rate": 1.1304347826086957e-05,
"loss": 0.0783,
"step": 891
},
{
"epoch": 7.75734494015234,
"grad_norm": 1.046875,
"learning_rate": 1.126086956521739e-05,
"loss": 0.1108,
"step": 892
},
{
"epoch": 7.766050054406964,
"grad_norm": 1.7265625,
"learning_rate": 1.1217391304347827e-05,
"loss": 0.2589,
"step": 893
},
{
"epoch": 7.7747551686615886,
"grad_norm": 1.125,
"learning_rate": 1.1173913043478261e-05,
"loss": 0.0986,
"step": 894
},
{
"epoch": 7.783460282916213,
"grad_norm": 1.0859375,
"learning_rate": 1.1130434782608696e-05,
"loss": 0.1168,
"step": 895
},
{
"epoch": 7.792165397170838,
"grad_norm": 1.1484375,
"learning_rate": 1.1086956521739131e-05,
"loss": 0.2031,
"step": 896
},
{
"epoch": 7.800870511425463,
"grad_norm": 1.375,
"learning_rate": 1.1043478260869566e-05,
"loss": 0.1569,
"step": 897
},
{
"epoch": 7.809575625680087,
"grad_norm": 1.0546875,
"learning_rate": 1.1000000000000001e-05,
"loss": 0.0973,
"step": 898
},
{
"epoch": 7.818280739934711,
"grad_norm": 1.3203125,
"learning_rate": 1.0956521739130435e-05,
"loss": 0.1015,
"step": 899
},
{
"epoch": 7.826985854189337,
"grad_norm": 0.98046875,
"learning_rate": 1.091304347826087e-05,
"loss": 0.087,
"step": 900
},
{
"epoch": 7.835690968443961,
"grad_norm": 1.28125,
"learning_rate": 1.0869565217391305e-05,
"loss": 0.36,
"step": 901
},
{
"epoch": 7.8443960826985855,
"grad_norm": 2.765625,
"learning_rate": 1.082608695652174e-05,
"loss": 0.287,
"step": 902
},
{
"epoch": 7.85310119695321,
"grad_norm": 1.2265625,
"learning_rate": 1.0782608695652174e-05,
"loss": 0.0851,
"step": 903
},
{
"epoch": 7.861806311207834,
"grad_norm": 1.0546875,
"learning_rate": 1.0739130434782609e-05,
"loss": 0.1365,
"step": 904
},
{
"epoch": 7.8705114254624595,
"grad_norm": 1.171875,
"learning_rate": 1.0695652173913044e-05,
"loss": 0.2044,
"step": 905
},
{
"epoch": 7.879216539717084,
"grad_norm": 1.2734375,
"learning_rate": 1.0652173913043479e-05,
"loss": 0.1314,
"step": 906
},
{
"epoch": 7.887921653971708,
"grad_norm": 1.25,
"learning_rate": 1.0608695652173914e-05,
"loss": 0.1122,
"step": 907
},
{
"epoch": 7.896626768226333,
"grad_norm": 1.203125,
"learning_rate": 1.0565217391304348e-05,
"loss": 0.0922,
"step": 908
},
{
"epoch": 7.905331882480958,
"grad_norm": 1.1484375,
"learning_rate": 1.0521739130434784e-05,
"loss": 0.0913,
"step": 909
},
{
"epoch": 7.914036996735582,
"grad_norm": 1.125,
"learning_rate": 1.0478260869565218e-05,
"loss": 0.0941,
"step": 910
},
{
"epoch": 7.922742110990207,
"grad_norm": 1.0625,
"learning_rate": 1.0434782608695651e-05,
"loss": 0.0751,
"step": 911
},
{
"epoch": 7.931447225244831,
"grad_norm": 1.109375,
"learning_rate": 1.0391304347826088e-05,
"loss": 0.2471,
"step": 912
},
{
"epoch": 7.9401523394994555,
"grad_norm": 1.1953125,
"learning_rate": 1.0347826086956522e-05,
"loss": 0.1198,
"step": 913
},
{
"epoch": 7.948857453754081,
"grad_norm": 1.1171875,
"learning_rate": 1.0304347826086957e-05,
"loss": 0.0838,
"step": 914
},
{
"epoch": 7.957562568008705,
"grad_norm": 1.4609375,
"learning_rate": 1.0260869565217392e-05,
"loss": 0.2575,
"step": 915
},
{
"epoch": 7.96626768226333,
"grad_norm": 1.5,
"learning_rate": 1.0217391304347827e-05,
"loss": 0.0823,
"step": 916
},
{
"epoch": 7.974972796517954,
"grad_norm": 1.1171875,
"learning_rate": 1.0173913043478262e-05,
"loss": 0.0966,
"step": 917
},
{
"epoch": 7.983677910772579,
"grad_norm": 1.1171875,
"learning_rate": 1.0130434782608695e-05,
"loss": 0.0865,
"step": 918
},
{
"epoch": 7.992383025027204,
"grad_norm": 1.40625,
"learning_rate": 1.008695652173913e-05,
"loss": 0.2363,
"step": 919
},
{
"epoch": 8.0,
"grad_norm": 1.375,
"learning_rate": 1.0043478260869566e-05,
"loss": 0.2258,
"step": 920
}
],
"logging_steps": 1,
"max_steps": 1150,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.735118942215864e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}