{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 418,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0023923444976076554,
"grad_norm": 35.77700764414347,
"learning_rate": 0.0,
"loss": 2.5866,
"num_tokens": 223542.0,
"step": 1
},
{
"epoch": 0.004784688995215311,
"grad_norm": 34.45799391693798,
"learning_rate": 7.692307692307694e-07,
"loss": 2.4086,
"num_tokens": 463760.0,
"step": 2
},
{
"epoch": 0.007177033492822967,
"grad_norm": 37.09122577441204,
"learning_rate": 1.5384615384615387e-06,
"loss": 2.5261,
"num_tokens": 645652.0,
"step": 3
},
{
"epoch": 0.009569377990430622,
"grad_norm": 33.51928857090818,
"learning_rate": 2.307692307692308e-06,
"loss": 2.4839,
"num_tokens": 857399.0,
"step": 4
},
{
"epoch": 0.011961722488038277,
"grad_norm": 28.56245226187573,
"learning_rate": 3.0769230769230774e-06,
"loss": 2.492,
"num_tokens": 1083627.0,
"step": 5
},
{
"epoch": 0.014354066985645933,
"grad_norm": 23.350183104787792,
"learning_rate": 3.846153846153847e-06,
"loss": 2.3562,
"num_tokens": 1301423.0,
"step": 6
},
{
"epoch": 0.01674641148325359,
"grad_norm": 18.9808075909167,
"learning_rate": 4.615384615384616e-06,
"loss": 2.3909,
"num_tokens": 1492075.0,
"step": 7
},
{
"epoch": 0.019138755980861243,
"grad_norm": 10.708395743692822,
"learning_rate": 5.384615384615385e-06,
"loss": 2.2473,
"num_tokens": 1679562.0,
"step": 8
},
{
"epoch": 0.0215311004784689,
"grad_norm": 9.224705958016894,
"learning_rate": 6.153846153846155e-06,
"loss": 2.1681,
"num_tokens": 1888359.0,
"step": 9
},
{
"epoch": 0.023923444976076555,
"grad_norm": 8.986278630054308,
"learning_rate": 6.923076923076923e-06,
"loss": 2.1438,
"num_tokens": 2058538.0,
"step": 10
},
{
"epoch": 0.02631578947368421,
"grad_norm": 4.193291761632025,
"learning_rate": 7.692307692307694e-06,
"loss": 2.0833,
"num_tokens": 2214696.0,
"step": 11
},
{
"epoch": 0.028708133971291867,
"grad_norm": 3.5075199043533334,
"learning_rate": 8.461538461538462e-06,
"loss": 1.9944,
"num_tokens": 2401723.0,
"step": 12
},
{
"epoch": 0.03110047846889952,
"grad_norm": 3.2504195652374874,
"learning_rate": 9.230769230769232e-06,
"loss": 2.0099,
"num_tokens": 2592218.0,
"step": 13
},
{
"epoch": 0.03349282296650718,
"grad_norm": 2.3589570152818795,
"learning_rate": 1e-05,
"loss": 1.9879,
"num_tokens": 2820770.0,
"step": 14
},
{
"epoch": 0.03588516746411483,
"grad_norm": 3.6838613043114705,
"learning_rate": 9.999864615158956e-06,
"loss": 1.816,
"num_tokens": 3018187.0,
"step": 15
},
{
"epoch": 0.03827751196172249,
"grad_norm": 2.804031211195472,
"learning_rate": 9.999458468782065e-06,
"loss": 1.9285,
"num_tokens": 3262110.0,
"step": 16
},
{
"epoch": 0.04066985645933014,
"grad_norm": 2.7340483571073664,
"learning_rate": 9.998781585307577e-06,
"loss": 1.7399,
"num_tokens": 3461819.0,
"step": 17
},
{
"epoch": 0.0430622009569378,
"grad_norm": 2.3573648641331952,
"learning_rate": 9.997834005464281e-06,
"loss": 1.8248,
"num_tokens": 3647749.0,
"step": 18
},
{
"epoch": 0.045454545454545456,
"grad_norm": 2.7936113880349964,
"learning_rate": 9.996615786269036e-06,
"loss": 1.514,
"num_tokens": 3816664.0,
"step": 19
},
{
"epoch": 0.04784688995215311,
"grad_norm": 1.5815077522510241,
"learning_rate": 9.995127001023362e-06,
"loss": 1.8473,
"num_tokens": 3981662.0,
"step": 20
},
{
"epoch": 0.050239234449760764,
"grad_norm": 1.262398317459698,
"learning_rate": 9.993367739309013e-06,
"loss": 1.7998,
"num_tokens": 4166705.0,
"step": 21
},
{
"epoch": 0.05263157894736842,
"grad_norm": 1.2085839356904715,
"learning_rate": 9.991338106982598e-06,
"loss": 1.7167,
"num_tokens": 4395460.0,
"step": 22
},
{
"epoch": 0.05502392344497608,
"grad_norm": 0.9331954135736191,
"learning_rate": 9.98903822616921e-06,
"loss": 1.7225,
"num_tokens": 4660660.0,
"step": 23
},
{
"epoch": 0.05741626794258373,
"grad_norm": 1.0079376114307204,
"learning_rate": 9.986468235255065e-06,
"loss": 1.7793,
"num_tokens": 4889032.0,
"step": 24
},
{
"epoch": 0.05980861244019139,
"grad_norm": 1.0174373092140978,
"learning_rate": 9.983628288879193e-06,
"loss": 1.6658,
"num_tokens": 5068345.0,
"step": 25
},
{
"epoch": 0.06220095693779904,
"grad_norm": 0.9895227235288739,
"learning_rate": 9.98051855792412e-06,
"loss": 1.7126,
"num_tokens": 5284697.0,
"step": 26
},
{
"epoch": 0.0645933014354067,
"grad_norm": 0.864412154650663,
"learning_rate": 9.977139229505596e-06,
"loss": 1.658,
"num_tokens": 5521778.0,
"step": 27
},
{
"epoch": 0.06698564593301436,
"grad_norm": 1.0342443056226127,
"learning_rate": 9.973490506961326e-06,
"loss": 1.5243,
"num_tokens": 5707434.0,
"step": 28
},
{
"epoch": 0.06937799043062201,
"grad_norm": 0.8663151664394599,
"learning_rate": 9.969572609838745e-06,
"loss": 1.6671,
"num_tokens": 5937867.0,
"step": 29
},
{
"epoch": 0.07177033492822966,
"grad_norm": 0.9781488889720827,
"learning_rate": 9.965385773881795e-06,
"loss": 1.6322,
"num_tokens": 6162073.0,
"step": 30
},
{
"epoch": 0.07416267942583732,
"grad_norm": 0.9101239670857304,
"learning_rate": 9.960930251016752e-06,
"loss": 1.716,
"num_tokens": 6375008.0,
"step": 31
},
{
"epoch": 0.07655502392344497,
"grad_norm": 0.9203816431810994,
"learning_rate": 9.956206309337067e-06,
"loss": 1.6335,
"num_tokens": 6568479.0,
"step": 32
},
{
"epoch": 0.07894736842105263,
"grad_norm": 0.9924366034325527,
"learning_rate": 9.951214233087223e-06,
"loss": 1.6849,
"num_tokens": 6786875.0,
"step": 33
},
{
"epoch": 0.08133971291866028,
"grad_norm": 1.0591590540309344,
"learning_rate": 9.945954322645643e-06,
"loss": 1.5556,
"num_tokens": 6983383.0,
"step": 34
},
{
"epoch": 0.08373205741626795,
"grad_norm": 0.859091081777175,
"learning_rate": 9.940426894506608e-06,
"loss": 1.6373,
"num_tokens": 7188567.0,
"step": 35
},
{
"epoch": 0.0861244019138756,
"grad_norm": 0.7819453253946674,
"learning_rate": 9.934632281261221e-06,
"loss": 1.5762,
"num_tokens": 7385552.0,
"step": 36
},
{
"epoch": 0.08851674641148326,
"grad_norm": 0.773342660575658,
"learning_rate": 9.928570831577396e-06,
"loss": 1.5561,
"num_tokens": 7583628.0,
"step": 37
},
{
"epoch": 0.09090909090909091,
"grad_norm": 0.6689075869463446,
"learning_rate": 9.922242910178862e-06,
"loss": 1.6899,
"num_tokens": 7809190.0,
"step": 38
},
{
"epoch": 0.09330143540669857,
"grad_norm": 0.7505436942893252,
"learning_rate": 9.915648897823232e-06,
"loss": 1.554,
"num_tokens": 7997624.0,
"step": 39
},
{
"epoch": 0.09569377990430622,
"grad_norm": 0.7015919399979235,
"learning_rate": 9.908789191279093e-06,
"loss": 1.5389,
"num_tokens": 8183494.0,
"step": 40
},
{
"epoch": 0.09808612440191387,
"grad_norm": 0.6466036781332534,
"learning_rate": 9.901664203302126e-06,
"loss": 1.6405,
"num_tokens": 8408569.0,
"step": 41
},
{
"epoch": 0.10047846889952153,
"grad_norm": 0.6548488840567466,
"learning_rate": 9.89427436261027e-06,
"loss": 1.5763,
"num_tokens": 8584336.0,
"step": 42
},
{
"epoch": 0.10287081339712918,
"grad_norm": 0.699125932858908,
"learning_rate": 9.886620113857926e-06,
"loss": 1.5114,
"num_tokens": 8787695.0,
"step": 43
},
{
"epoch": 0.10526315789473684,
"grad_norm": 0.723472446208709,
"learning_rate": 9.878701917609208e-06,
"loss": 1.5305,
"num_tokens": 9005521.0,
"step": 44
},
{
"epoch": 0.1076555023923445,
"grad_norm": 0.7384196667665146,
"learning_rate": 9.870520250310223e-06,
"loss": 1.6305,
"num_tokens": 9221565.0,
"step": 45
},
{
"epoch": 0.11004784688995216,
"grad_norm": 0.629523841227617,
"learning_rate": 9.862075604260402e-06,
"loss": 1.6484,
"num_tokens": 9444154.0,
"step": 46
},
{
"epoch": 0.11244019138755981,
"grad_norm": 0.6425557703652314,
"learning_rate": 9.853368487582888e-06,
"loss": 1.4761,
"num_tokens": 9628428.0,
"step": 47
},
{
"epoch": 0.11483253588516747,
"grad_norm": 0.6988538672174611,
"learning_rate": 9.84439942419395e-06,
"loss": 1.6356,
"num_tokens": 9862779.0,
"step": 48
},
{
"epoch": 0.11722488038277512,
"grad_norm": 0.6304374061279027,
"learning_rate": 9.835168953771463e-06,
"loss": 1.5105,
"num_tokens": 10111617.0,
"step": 49
},
{
"epoch": 0.11961722488038277,
"grad_norm": 0.6166276961179814,
"learning_rate": 9.825677631722436e-06,
"loss": 1.6297,
"num_tokens": 10349939.0,
"step": 50
},
{
"epoch": 0.12200956937799043,
"grad_norm": 0.7275282546024043,
"learning_rate": 9.815926029149593e-06,
"loss": 1.5068,
"num_tokens": 10534354.0,
"step": 51
},
{
"epoch": 0.12440191387559808,
"grad_norm": 0.6327061243856574,
"learning_rate": 9.805914732817007e-06,
"loss": 1.5524,
"num_tokens": 10742091.0,
"step": 52
},
{
"epoch": 0.12679425837320574,
"grad_norm": 0.7084417481062125,
"learning_rate": 9.795644345114796e-06,
"loss": 1.6218,
"num_tokens": 10961264.0,
"step": 53
},
{
"epoch": 0.1291866028708134,
"grad_norm": 0.7292180354816592,
"learning_rate": 9.78511548402287e-06,
"loss": 1.3672,
"num_tokens": 11119849.0,
"step": 54
},
{
"epoch": 0.13157894736842105,
"grad_norm": 0.6555927818623691,
"learning_rate": 9.77432878307376e-06,
"loss": 1.5114,
"num_tokens": 11325079.0,
"step": 55
},
{
"epoch": 0.1339712918660287,
"grad_norm": 0.7613236601730216,
"learning_rate": 9.763284891314481e-06,
"loss": 1.2232,
"num_tokens": 11509772.0,
"step": 56
},
{
"epoch": 0.13636363636363635,
"grad_norm": 0.6430242992230627,
"learning_rate": 9.751984473267498e-06,
"loss": 1.5539,
"num_tokens": 11686001.0,
"step": 57
},
{
"epoch": 0.13875598086124402,
"grad_norm": 0.8426300537945989,
"learning_rate": 9.740428208890716e-06,
"loss": 1.3227,
"num_tokens": 11861406.0,
"step": 58
},
{
"epoch": 0.14114832535885166,
"grad_norm": 0.7044359465699243,
"learning_rate": 9.728616793536588e-06,
"loss": 1.5587,
"num_tokens": 12083010.0,
"step": 59
},
{
"epoch": 0.14354066985645933,
"grad_norm": 0.6499151456833749,
"learning_rate": 9.716550937910268e-06,
"loss": 1.4789,
"num_tokens": 12331238.0,
"step": 60
},
{
"epoch": 0.145933014354067,
"grad_norm": 0.8468125913528113,
"learning_rate": 9.70423136802684e-06,
"loss": 1.397,
"num_tokens": 12510110.0,
"step": 61
},
{
"epoch": 0.14832535885167464,
"grad_norm": 0.6549777551235627,
"learning_rate": 9.691658825167641e-06,
"loss": 1.5487,
"num_tokens": 12703234.0,
"step": 62
},
{
"epoch": 0.1507177033492823,
"grad_norm": 1.861908798533634,
"learning_rate": 9.67883406583566e-06,
"loss": 1.5182,
"num_tokens": 12941740.0,
"step": 63
},
{
"epoch": 0.15311004784688995,
"grad_norm": 0.9710524431103752,
"learning_rate": 9.665757861710008e-06,
"loss": 0.9673,
"num_tokens": 13066657.0,
"step": 64
},
{
"epoch": 0.15550239234449761,
"grad_norm": 0.7709985947911734,
"learning_rate": 9.652430999599491e-06,
"loss": 1.2468,
"num_tokens": 13220140.0,
"step": 65
},
{
"epoch": 0.15789473684210525,
"grad_norm": 1.148184964185836,
"learning_rate": 9.638854281395271e-06,
"loss": 1.3733,
"num_tokens": 13397562.0,
"step": 66
},
{
"epoch": 0.16028708133971292,
"grad_norm": 1.8909672590403999,
"learning_rate": 9.625028524022606e-06,
"loss": 1.5125,
"num_tokens": 13591137.0,
"step": 67
},
{
"epoch": 0.16267942583732056,
"grad_norm": 0.6070509391624586,
"learning_rate": 9.610954559391704e-06,
"loss": 1.576,
"num_tokens": 13812222.0,
"step": 68
},
{
"epoch": 0.16507177033492823,
"grad_norm": 0.9685185344694189,
"learning_rate": 9.596633234347661e-06,
"loss": 1.3289,
"num_tokens": 14034014.0,
"step": 69
},
{
"epoch": 0.1674641148325359,
"grad_norm": 1.3816831844533364,
"learning_rate": 9.582065410619503e-06,
"loss": 1.6031,
"num_tokens": 14248962.0,
"step": 70
},
{
"epoch": 0.16985645933014354,
"grad_norm": 1.0562478172616532,
"learning_rate": 9.567251964768343e-06,
"loss": 1.1815,
"num_tokens": 14461235.0,
"step": 71
},
{
"epoch": 0.1722488038277512,
"grad_norm": 0.8780808898792398,
"learning_rate": 9.55219378813463e-06,
"loss": 1.4514,
"num_tokens": 14655207.0,
"step": 72
},
{
"epoch": 0.17464114832535885,
"grad_norm": 1.1376167558495347,
"learning_rate": 9.53689178678452e-06,
"loss": 1.2722,
"num_tokens": 14832145.0,
"step": 73
},
{
"epoch": 0.17703349282296652,
"grad_norm": 0.954111645763459,
"learning_rate": 9.521346881455356e-06,
"loss": 1.3542,
"num_tokens": 15010659.0,
"step": 74
},
{
"epoch": 0.17942583732057416,
"grad_norm": 1.1453698076350756,
"learning_rate": 9.505560007500263e-06,
"loss": 1.3355,
"num_tokens": 15213072.0,
"step": 75
},
{
"epoch": 0.18181818181818182,
"grad_norm": 0.7130576585117359,
"learning_rate": 9.489532114831876e-06,
"loss": 1.4303,
"num_tokens": 15403547.0,
"step": 76
},
{
"epoch": 0.18421052631578946,
"grad_norm": 1.1940739116738854,
"learning_rate": 9.473264167865172e-06,
"loss": 1.4188,
"num_tokens": 15608337.0,
"step": 77
},
{
"epoch": 0.18660287081339713,
"grad_norm": 0.8288648375057176,
"learning_rate": 9.456757145459445e-06,
"loss": 1.5327,
"num_tokens": 15844122.0,
"step": 78
},
{
"epoch": 0.18899521531100477,
"grad_norm": 0.7407524557729414,
"learning_rate": 9.44001204085941e-06,
"loss": 1.2769,
"num_tokens": 16007831.0,
"step": 79
},
{
"epoch": 0.19138755980861244,
"grad_norm": 0.6820580256711418,
"learning_rate": 9.423029861635431e-06,
"loss": 1.4512,
"num_tokens": 16192803.0,
"step": 80
},
{
"epoch": 0.1937799043062201,
"grad_norm": 0.7648355853081366,
"learning_rate": 9.405811629622904e-06,
"loss": 1.5147,
"num_tokens": 16372150.0,
"step": 81
},
{
"epoch": 0.19617224880382775,
"grad_norm": 0.9430997963743641,
"learning_rate": 9.388358380860763e-06,
"loss": 1.439,
"num_tokens": 16544020.0,
"step": 82
},
{
"epoch": 0.19856459330143542,
"grad_norm": 0.6625036619665103,
"learning_rate": 9.370671165529146e-06,
"loss": 1.362,
"num_tokens": 16724075.0,
"step": 83
},
{
"epoch": 0.20095693779904306,
"grad_norm": 0.6779552456686411,
"learning_rate": 9.3527510478862e-06,
"loss": 1.296,
"num_tokens": 16892945.0,
"step": 84
},
{
"epoch": 0.20334928229665072,
"grad_norm": 0.8756233237648234,
"learning_rate": 9.334599106204051e-06,
"loss": 1.4463,
"num_tokens": 17052936.0,
"step": 85
},
{
"epoch": 0.20574162679425836,
"grad_norm": 0.7900920573876685,
"learning_rate": 9.316216432703918e-06,
"loss": 1.1998,
"num_tokens": 17229219.0,
"step": 86
},
{
"epoch": 0.20813397129186603,
"grad_norm": 0.6469526433608052,
"learning_rate": 9.29760413349039e-06,
"loss": 1.4746,
"num_tokens": 17439405.0,
"step": 87
},
{
"epoch": 0.21052631578947367,
"grad_norm": 0.616638088055178,
"learning_rate": 9.278763328484875e-06,
"loss": 1.6045,
"num_tokens": 17642943.0,
"step": 88
},
{
"epoch": 0.21291866028708134,
"grad_norm": 0.950240919701955,
"learning_rate": 9.259695151358215e-06,
"loss": 1.4296,
"num_tokens": 17893610.0,
"step": 89
},
{
"epoch": 0.215311004784689,
"grad_norm": 0.8787225924649048,
"learning_rate": 9.240400749462467e-06,
"loss": 1.3394,
"num_tokens": 18041227.0,
"step": 90
},
{
"epoch": 0.21770334928229665,
"grad_norm": 0.6650651036759095,
"learning_rate": 9.220881283761868e-06,
"loss": 1.4976,
"num_tokens": 18231908.0,
"step": 91
},
{
"epoch": 0.22009569377990432,
"grad_norm": 1.0488690262480544,
"learning_rate": 9.20113792876298e-06,
"loss": 1.544,
"num_tokens": 18449826.0,
"step": 92
},
{
"epoch": 0.22248803827751196,
"grad_norm": 0.8128101002861978,
"learning_rate": 9.181171872444015e-06,
"loss": 1.6064,
"num_tokens": 18678175.0,
"step": 93
},
{
"epoch": 0.22488038277511962,
"grad_norm": 0.8352400142358486,
"learning_rate": 9.160984316183354e-06,
"loss": 1.3891,
"num_tokens": 18856695.0,
"step": 94
},
{
"epoch": 0.22727272727272727,
"grad_norm": 0.6330668052004139,
"learning_rate": 9.140576474687263e-06,
"loss": 1.478,
"num_tokens": 19058871.0,
"step": 95
},
{
"epoch": 0.22966507177033493,
"grad_norm": 0.6984617187165848,
"learning_rate": 9.1199495759168e-06,
"loss": 1.3507,
"num_tokens": 19251656.0,
"step": 96
},
{
"epoch": 0.23205741626794257,
"grad_norm": 0.86370290023878,
"learning_rate": 9.099104861013922e-06,
"loss": 1.3834,
"num_tokens": 19432481.0,
"step": 97
},
{
"epoch": 0.23444976076555024,
"grad_norm": 0.7464764871732519,
"learning_rate": 9.078043584226816e-06,
"loss": 1.2249,
"num_tokens": 19625994.0,
"step": 98
},
{
"epoch": 0.23684210526315788,
"grad_norm": 0.7046998605163254,
"learning_rate": 9.056767012834417e-06,
"loss": 1.3523,
"num_tokens": 19879656.0,
"step": 99
},
{
"epoch": 0.23923444976076555,
"grad_norm": 0.6797355825616871,
"learning_rate": 9.035276427070166e-06,
"loss": 1.4905,
"num_tokens": 20094011.0,
"step": 100
},
{
"epoch": 0.24162679425837322,
"grad_norm": 0.643387767261834,
"learning_rate": 9.013573120044968e-06,
"loss": 1.4817,
"num_tokens": 20324769.0,
"step": 101
},
{
"epoch": 0.24401913875598086,
"grad_norm": 0.6345521357259186,
"learning_rate": 8.991658397669384e-06,
"loss": 1.406,
"num_tokens": 20523715.0,
"step": 102
},
{
"epoch": 0.24641148325358853,
"grad_norm": 0.5851359669759257,
"learning_rate": 8.96953357857507e-06,
"loss": 1.5156,
"num_tokens": 20744615.0,
"step": 103
},
{
"epoch": 0.24880382775119617,
"grad_norm": 0.7543388243657427,
"learning_rate": 8.947199994035402e-06,
"loss": 1.4965,
"num_tokens": 20954429.0,
"step": 104
},
{
"epoch": 0.2511961722488038,
"grad_norm": 0.6708121582654688,
"learning_rate": 8.924658987885403e-06,
"loss": 1.4257,
"num_tokens": 21152905.0,
"step": 105
},
{
"epoch": 0.2535885167464115,
"grad_norm": 0.8402032350426654,
"learning_rate": 8.901911916440867e-06,
"loss": 1.5563,
"num_tokens": 21368251.0,
"step": 106
},
{
"epoch": 0.25598086124401914,
"grad_norm": 0.7320851769974586,
"learning_rate": 8.878960148416747e-06,
"loss": 1.3888,
"num_tokens": 21508325.0,
"step": 107
},
{
"epoch": 0.2583732057416268,
"grad_norm": 0.7657651705836107,
"learning_rate": 8.855805064844808e-06,
"loss": 1.2907,
"num_tokens": 21657394.0,
"step": 108
},
{
"epoch": 0.2607655502392344,
"grad_norm": 0.7516028881611917,
"learning_rate": 8.832448058990522e-06,
"loss": 1.3294,
"num_tokens": 21898147.0,
"step": 109
},
{
"epoch": 0.2631578947368421,
"grad_norm": 0.7412973068093045,
"learning_rate": 8.80889053626923e-06,
"loss": 1.3493,
"num_tokens": 22114888.0,
"step": 110
},
{
"epoch": 0.26555023923444976,
"grad_norm": 0.715207637159085,
"learning_rate": 8.785133914161586e-06,
"loss": 1.3189,
"num_tokens": 22309919.0,
"step": 111
},
{
"epoch": 0.2679425837320574,
"grad_norm": 0.8490500056983592,
"learning_rate": 8.761179622128264e-06,
"loss": 1.2958,
"num_tokens": 22463886.0,
"step": 112
},
{
"epoch": 0.2703349282296651,
"grad_norm": 0.6593276317049697,
"learning_rate": 8.737029101523931e-06,
"loss": 1.3876,
"num_tokens": 22695323.0,
"step": 113
},
{
"epoch": 0.2727272727272727,
"grad_norm": 0.809173159712039,
"learning_rate": 8.712683805510547e-06,
"loss": 1.2752,
"num_tokens": 22923680.0,
"step": 114
},
{
"epoch": 0.2751196172248804,
"grad_norm": 0.7247603972983714,
"learning_rate": 8.6881451989699e-06,
"loss": 1.1074,
"num_tokens": 23075695.0,
"step": 115
},
{
"epoch": 0.27751196172248804,
"grad_norm": 0.6591979377593177,
"learning_rate": 8.66341475841548e-06,
"loss": 1.4945,
"num_tokens": 23320851.0,
"step": 116
},
{
"epoch": 0.2799043062200957,
"grad_norm": 1.1647325657638634,
"learning_rate": 8.638493971903621e-06,
"loss": 1.2714,
"num_tokens": 23524534.0,
"step": 117
},
{
"epoch": 0.2822966507177033,
"grad_norm": 0.7586726684402458,
"learning_rate": 8.613384338943982e-06,
"loss": 1.3522,
"num_tokens": 23773345.0,
"step": 118
},
{
"epoch": 0.284688995215311,
"grad_norm": 0.6733449030482199,
"learning_rate": 8.588087370409303e-06,
"loss": 1.3563,
"num_tokens": 23961835.0,
"step": 119
},
{
"epoch": 0.28708133971291866,
"grad_norm": 1.1832042021851583,
"learning_rate": 8.562604588444498e-06,
"loss": 1.368,
"num_tokens": 24186825.0,
"step": 120
},
{
"epoch": 0.2894736842105263,
"grad_norm": 1.1589708241685623,
"learning_rate": 8.536937526375075e-06,
"loss": 1.4034,
"num_tokens": 24422495.0,
"step": 121
},
{
"epoch": 0.291866028708134,
"grad_norm": 0.7701824945643486,
"learning_rate": 8.511087728614863e-06,
"loss": 1.4039,
"num_tokens": 24638181.0,
"step": 122
},
{
"epoch": 0.2942583732057416,
"grad_norm": 0.6500381479650347,
"learning_rate": 8.485056750573088e-06,
"loss": 1.4504,
"num_tokens": 24875758.0,
"step": 123
},
{
"epoch": 0.2966507177033493,
"grad_norm": 0.6319890651859407,
"learning_rate": 8.458846158560787e-06,
"loss": 1.4881,
"num_tokens": 25089829.0,
"step": 124
},
{
"epoch": 0.29904306220095694,
"grad_norm": 1.388998615476455,
"learning_rate": 8.43245752969655e-06,
"loss": 1.1747,
"num_tokens": 25311276.0,
"step": 125
},
{
"epoch": 0.3014354066985646,
"grad_norm": 1.0789606093954458,
"learning_rate": 8.40589245181163e-06,
"loss": 1.1788,
"num_tokens": 25489153.0,
"step": 126
},
{
"epoch": 0.3038277511961722,
"grad_norm": 0.7049145346829824,
"learning_rate": 8.379152523354407e-06,
"loss": 1.3323,
"num_tokens": 25669945.0,
"step": 127
},
{
"epoch": 0.3062200956937799,
"grad_norm": 0.6756413517215413,
"learning_rate": 8.352239353294196e-06,
"loss": 1.4037,
"num_tokens": 25909865.0,
"step": 128
},
{
"epoch": 0.30861244019138756,
"grad_norm": 0.8708376581094076,
"learning_rate": 8.325154561024445e-06,
"loss": 1.3679,
"num_tokens": 26127238.0,
"step": 129
},
{
"epoch": 0.31100478468899523,
"grad_norm": 1.3947730736376298,
"learning_rate": 8.29789977626528e-06,
"loss": 1.368,
"num_tokens": 26410830.0,
"step": 130
},
{
"epoch": 0.3133971291866029,
"grad_norm": 0.6066250801672678,
"learning_rate": 8.270476638965463e-06,
"loss": 1.4124,
"num_tokens": 26580685.0,
"step": 131
},
{
"epoch": 0.3157894736842105,
"grad_norm": 0.6459251127241467,
"learning_rate": 8.242886799203696e-06,
"loss": 1.4815,
"num_tokens": 26810854.0,
"step": 132
},
{
"epoch": 0.3181818181818182,
"grad_norm": 0.8523013271806112,
"learning_rate": 8.215131917089342e-06,
"loss": 1.3218,
"num_tokens": 27020107.0,
"step": 133
},
{
"epoch": 0.32057416267942584,
"grad_norm": 0.80764930998251,
"learning_rate": 8.187213662662539e-06,
"loss": 1.4524,
"num_tokens": 27225900.0,
"step": 134
},
{
"epoch": 0.3229665071770335,
"grad_norm": 0.8176074504442626,
"learning_rate": 8.159133715793701e-06,
"loss": 1.1947,
"num_tokens": 27437322.0,
"step": 135
},
{
"epoch": 0.3253588516746411,
"grad_norm": 0.6612632769182687,
"learning_rate": 8.13089376608245e-06,
"loss": 1.4414,
"num_tokens": 27620222.0,
"step": 136
},
{
"epoch": 0.3277511961722488,
"grad_norm": 0.7997382570532242,
"learning_rate": 8.102495512755939e-06,
"loss": 1.1692,
"num_tokens": 27779969.0,
"step": 137
},
{
"epoch": 0.33014354066985646,
"grad_norm": 0.7611481255340015,
"learning_rate": 8.073940664566623e-06,
"loss": 1.2419,
"num_tokens": 27975654.0,
"step": 138
},
{
"epoch": 0.33253588516746413,
"grad_norm": 1.2608591606765718,
"learning_rate": 8.045230939689425e-06,
"loss": 1.389,
"num_tokens": 28167517.0,
"step": 139
},
{
"epoch": 0.3349282296650718,
"grad_norm": 0.5773709072619498,
"learning_rate": 8.016368065618361e-06,
"loss": 1.4468,
"num_tokens": 28484552.0,
"step": 140
},
{
"epoch": 0.3373205741626794,
"grad_norm": 0.725628062383284,
"learning_rate": 7.987353779062598e-06,
"loss": 1.1387,
"num_tokens": 28658376.0,
"step": 141
},
{
"epoch": 0.3397129186602871,
"grad_norm": 1.0360991727200277,
"learning_rate": 7.958189825841942e-06,
"loss": 1.0863,
"num_tokens": 28857873.0,
"step": 142
},
{
"epoch": 0.34210526315789475,
"grad_norm": 0.8786402024406298,
"learning_rate": 7.928877960781808e-06,
"loss": 1.2436,
"num_tokens": 29065902.0,
"step": 143
},
{
"epoch": 0.3444976076555024,
"grad_norm": 0.6828988509231846,
"learning_rate": 7.899419947607611e-06,
"loss": 1.4069,
"num_tokens": 29258786.0,
"step": 144
},
{
"epoch": 0.34688995215311,
"grad_norm": 0.7208210642568142,
"learning_rate": 7.869817558838654e-06,
"loss": 1.0879,
"num_tokens": 29468217.0,
"step": 145
},
{
"epoch": 0.3492822966507177,
"grad_norm": 0.7753656834569056,
"learning_rate": 7.840072575681468e-06,
"loss": 1.254,
"num_tokens": 29637430.0,
"step": 146
},
{
"epoch": 0.35167464114832536,
"grad_norm": 0.9152188366691545,
"learning_rate": 7.810186787922645e-06,
"loss": 1.3457,
"num_tokens": 29878720.0,
"step": 147
},
{
"epoch": 0.35406698564593303,
"grad_norm": 0.6836864741610811,
"learning_rate": 7.78016199382112e-06,
"loss": 1.3482,
"num_tokens": 30112527.0,
"step": 148
},
{
"epoch": 0.35645933014354064,
"grad_norm": 0.7649029535599635,
"learning_rate": 7.75e-06,
"loss": 1.2517,
"num_tokens": 30303711.0,
"step": 149
},
{
"epoch": 0.3588516746411483,
"grad_norm": 0.6409818000165072,
"learning_rate": 7.719702621337834e-06,
"loss": 1.4567,
"num_tokens": 30495350.0,
"step": 150
},
{
"epoch": 0.361244019138756,
"grad_norm": 0.6118573341165198,
"learning_rate": 7.68927168085942e-06,
"loss": 1.5922,
"num_tokens": 30731067.0,
"step": 151
},
{
"epoch": 0.36363636363636365,
"grad_norm": 0.651410536310813,
"learning_rate": 7.658709009626109e-06,
"loss": 1.4001,
"num_tokens": 30991669.0,
"step": 152
},
{
"epoch": 0.3660287081339713,
"grad_norm": 0.6691292954708529,
"learning_rate": 7.628016446625626e-06,
"loss": 1.3455,
"num_tokens": 31201627.0,
"step": 153
},
{
"epoch": 0.3684210526315789,
"grad_norm": 0.6334035930223118,
"learning_rate": 7.597195838661426e-06,
"loss": 1.5338,
"num_tokens": 31406112.0,
"step": 154
},
{
"epoch": 0.3708133971291866,
"grad_norm": 0.6127199012442186,
"learning_rate": 7.566249040241553e-06,
"loss": 1.4887,
"num_tokens": 31609455.0,
"step": 155
},
{
"epoch": 0.37320574162679426,
"grad_norm": 0.7297799820848448,
"learning_rate": 7.53517791346707e-06,
"loss": 1.4004,
"num_tokens": 31780471.0,
"step": 156
},
{
"epoch": 0.37559808612440193,
"grad_norm": 0.6946608625160634,
"learning_rate": 7.503984327920003e-06,
"loss": 1.4223,
"num_tokens": 32000025.0,
"step": 157
},
{
"epoch": 0.37799043062200954,
"grad_norm": 0.8430323980042668,
"learning_rate": 7.472670160550849e-06,
"loss": 1.4304,
"num_tokens": 32231426.0,
"step": 158
},
{
"epoch": 0.3803827751196172,
"grad_norm": 0.5357203180428419,
"learning_rate": 7.441237295565642e-06,
"loss": 1.3439,
"num_tokens": 32506275.0,
"step": 159
},
{
"epoch": 0.3827751196172249,
"grad_norm": 0.7610722225382753,
"learning_rate": 7.409687624312569e-06,
"loss": 1.5259,
"num_tokens": 32731571.0,
"step": 160
},
{
"epoch": 0.38516746411483255,
"grad_norm": 0.8234610158166737,
"learning_rate": 7.378023045168181e-06,
"loss": 1.4657,
"num_tokens": 32929424.0,
"step": 161
},
{
"epoch": 0.3875598086124402,
"grad_norm": 0.6460594254871024,
"learning_rate": 7.346245463423148e-06,
"loss": 1.1413,
"num_tokens": 33091736.0,
"step": 162
},
{
"epoch": 0.38995215311004783,
"grad_norm": 0.6455217606881992,
"learning_rate": 7.314356791167626e-06,
"loss": 1.4832,
"num_tokens": 33325006.0,
"step": 163
},
{
"epoch": 0.3923444976076555,
"grad_norm": 0.6417174012861143,
"learning_rate": 7.282358947176207e-06,
"loss": 1.4427,
"num_tokens": 33527584.0,
"step": 164
},
{
"epoch": 0.39473684210526316,
"grad_norm": 0.7003025374700267,
"learning_rate": 7.250253856792452e-06,
"loss": 1.4317,
"num_tokens": 33749355.0,
"step": 165
},
{
"epoch": 0.39712918660287083,
"grad_norm": 0.694765245729581,
"learning_rate": 7.218043451813058e-06,
"loss": 1.5191,
"num_tokens": 34042210.0,
"step": 166
},
{
"epoch": 0.39952153110047844,
"grad_norm": 0.6862863313107851,
"learning_rate": 7.185729670371605e-06,
"loss": 1.2854,
"num_tokens": 34231648.0,
"step": 167
},
{
"epoch": 0.4019138755980861,
"grad_norm": 0.627780658507271,
"learning_rate": 7.153314456821942e-06,
"loss": 1.429,
"num_tokens": 34451588.0,
"step": 168
},
{
"epoch": 0.4043062200956938,
"grad_norm": 0.824208162193406,
"learning_rate": 7.120799761621198e-06,
"loss": 1.1818,
"num_tokens": 34618135.0,
"step": 169
},
{
"epoch": 0.40669856459330145,
"grad_norm": 0.9393638903104259,
"learning_rate": 7.08818754121241e-06,
"loss": 1.3077,
"num_tokens": 34832232.0,
"step": 170
},
{
"epoch": 0.4090909090909091,
"grad_norm": 0.6416833884125813,
"learning_rate": 7.0554797579068155e-06,
"loss": 1.3405,
"num_tokens": 35024526.0,
"step": 171
},
{
"epoch": 0.41148325358851673,
"grad_norm": 0.5995702891023682,
"learning_rate": 7.022678379765766e-06,
"loss": 1.4452,
"num_tokens": 35268266.0,
"step": 172
},
{
"epoch": 0.4138755980861244,
"grad_norm": 0.636425551662653,
"learning_rate": 6.989785380482313e-06,
"loss": 1.3153,
"num_tokens": 35477419.0,
"step": 173
},
{
"epoch": 0.41626794258373206,
"grad_norm": 0.6814014450370663,
"learning_rate": 6.956802739262446e-06,
"loss": 1.3158,
"num_tokens": 35674184.0,
"step": 174
},
{
"epoch": 0.41866028708133973,
"grad_norm": 0.6594946487026292,
"learning_rate": 6.923732440706005e-06,
"loss": 1.1427,
"num_tokens": 35840117.0,
"step": 175
},
{
"epoch": 0.42105263157894735,
"grad_norm": 0.7126174750307512,
"learning_rate": 6.890576474687264e-06,
"loss": 1.4535,
"num_tokens": 36065504.0,
"step": 176
},
{
"epoch": 0.423444976076555,
"grad_norm": 0.6199132849036029,
"learning_rate": 6.857336836235195e-06,
"loss": 1.332,
"num_tokens": 36231288.0,
"step": 177
},
{
"epoch": 0.4258373205741627,
"grad_norm": 0.6209510353715602,
"learning_rate": 6.824015525413428e-06,
"loss": 1.3095,
"num_tokens": 36423759.0,
"step": 178
},
{
"epoch": 0.42822966507177035,
"grad_norm": 0.7108089495676037,
"learning_rate": 6.790614547199908e-06,
"loss": 1.3967,
"num_tokens": 36654811.0,
"step": 179
},
{
"epoch": 0.430622009569378,
"grad_norm": 0.8599346708468241,
"learning_rate": 6.7571359113662405e-06,
"loss": 1.1825,
"num_tokens": 36806684.0,
"step": 180
},
{
"epoch": 0.43301435406698563,
"grad_norm": 0.7746452027624019,
"learning_rate": 6.723581632356783e-06,
"loss": 1.2524,
"num_tokens": 36974284.0,
"step": 181
},
{
"epoch": 0.4354066985645933,
"grad_norm": 0.6020692340908421,
"learning_rate": 6.689953729167411e-06,
"loss": 1.2342,
"num_tokens": 37200621.0,
"step": 182
},
{
"epoch": 0.43779904306220097,
"grad_norm": 0.797055893773105,
"learning_rate": 6.65625422522405e-06,
"loss": 1.0268,
"num_tokens": 37342950.0,
"step": 183
},
{
"epoch": 0.44019138755980863,
"grad_norm": 0.7123312279475232,
"learning_rate": 6.622485148260916e-06,
"loss": 1.3021,
"num_tokens": 37515788.0,
"step": 184
},
{
"epoch": 0.44258373205741625,
"grad_norm": 0.7937734591260545,
"learning_rate": 6.588648530198505e-06,
"loss": 1.4699,
"num_tokens": 37746273.0,
"step": 185
},
{
"epoch": 0.4449760765550239,
"grad_norm": 0.8697751290589487,
"learning_rate": 6.554746407021332e-06,
"loss": 1.171,
"num_tokens": 37900069.0,
"step": 186
},
{
"epoch": 0.4473684210526316,
"grad_norm": 0.6523686305235465,
"learning_rate": 6.520780818655421e-06,
"loss": 1.3579,
"num_tokens": 38100846.0,
"step": 187
},
{
"epoch": 0.44976076555023925,
"grad_norm": 0.76156891130206,
"learning_rate": 6.486753808845565e-06,
"loss": 1.2426,
"num_tokens": 38269916.0,
"step": 188
},
{
"epoch": 0.45215311004784686,
"grad_norm": 0.7778813369773113,
"learning_rate": 6.45266742503235e-06,
"loss": 1.417,
"num_tokens": 38507557.0,
"step": 189
},
{
"epoch": 0.45454545454545453,
"grad_norm": 0.6252852335577198,
"learning_rate": 6.418523718228952e-06,
"loss": 1.485,
"num_tokens": 38773090.0,
"step": 190
},
{
"epoch": 0.4569377990430622,
"grad_norm": 0.6334892736043869,
"learning_rate": 6.3843247428977365e-06,
"loss": 1.16,
"num_tokens": 38974351.0,
"step": 191
},
{
"epoch": 0.45933014354066987,
"grad_norm": 0.6580511686936932,
"learning_rate": 6.350072556826632e-06,
"loss": 1.3935,
"num_tokens": 39123913.0,
"step": 192
},
{
"epoch": 0.46172248803827753,
"grad_norm": 0.9741231496059587,
"learning_rate": 6.315769221005313e-06,
"loss": 1.3265,
"num_tokens": 39375811.0,
"step": 193
},
{
"epoch": 0.46411483253588515,
"grad_norm": 0.6742978137081723,
"learning_rate": 6.281416799501188e-06,
"loss": 1.3934,
"num_tokens": 39601804.0,
"step": 194
},
{
"epoch": 0.4665071770334928,
"grad_norm": 0.7390771455576962,
"learning_rate": 6.247017359335199e-06,
"loss": 1.171,
"num_tokens": 39757895.0,
"step": 195
},
{
"epoch": 0.4688995215311005,
"grad_norm": 0.6747486626145579,
"learning_rate": 6.2125729703574534e-06,
"loss": 1.3421,
"num_tokens": 39951954.0,
"step": 196
},
{
"epoch": 0.47129186602870815,
"grad_norm": 0.6361657014007149,
"learning_rate": 6.178085705122675e-06,
"loss": 1.3744,
"num_tokens": 40144065.0,
"step": 197
},
{
"epoch": 0.47368421052631576,
"grad_norm": 0.6813668201444038,
"learning_rate": 6.143557638765494e-06,
"loss": 1.2369,
"num_tokens": 40362129.0,
"step": 198
},
{
"epoch": 0.47607655502392343,
"grad_norm": 0.5383944924160711,
"learning_rate": 6.108990848875591e-06,
"loss": 1.4575,
"num_tokens": 40573664.0,
"step": 199
},
{
"epoch": 0.4784688995215311,
"grad_norm": 0.5793342283366394,
"learning_rate": 6.074387415372677e-06,
"loss": 1.2934,
"num_tokens": 40796949.0,
"step": 200
},
{
"epoch": 0.48086124401913877,
"grad_norm": 0.5637720873789647,
"learning_rate": 6.039749420381349e-06,
"loss": 1.2067,
"num_tokens": 41001222.0,
"step": 201
},
{
"epoch": 0.48325358851674644,
"grad_norm": 0.6798868345680159,
"learning_rate": 6.005078948105808e-06,
"loss": 1.1563,
"num_tokens": 41187424.0,
"step": 202
},
{
"epoch": 0.48564593301435405,
"grad_norm": 0.6423876571963225,
"learning_rate": 5.970378084704441e-06,
"loss": 1.2355,
"num_tokens": 41372314.0,
"step": 203
},
{
"epoch": 0.4880382775119617,
"grad_norm": 0.5968488672427387,
"learning_rate": 5.935648918164308e-06,
"loss": 1.4124,
"num_tokens": 41599872.0,
"step": 204
},
{
"epoch": 0.4904306220095694,
"grad_norm": 0.7153999682703835,
"learning_rate": 5.90089353817549e-06,
"loss": 1.2553,
"num_tokens": 41781334.0,
"step": 205
},
{
"epoch": 0.49282296650717705,
"grad_norm": 0.7701093892397903,
"learning_rate": 5.866114036005363e-06,
"loss": 0.9825,
"num_tokens": 41932610.0,
"step": 206
},
{
"epoch": 0.49521531100478466,
"grad_norm": 0.6134522844262603,
"learning_rate": 5.831312504372762e-06,
"loss": 1.4134,
"num_tokens": 42124179.0,
"step": 207
},
{
"epoch": 0.49760765550239233,
"grad_norm": 0.6289100662436853,
"learning_rate": 5.796491037322054e-06,
"loss": 1.1654,
"num_tokens": 42345381.0,
"step": 208
},
{
"epoch": 0.5,
"grad_norm": 0.5666466799765261,
"learning_rate": 5.761651730097142e-06,
"loss": 1.2846,
"num_tokens": 42538163.0,
"step": 209
},
{
"epoch": 0.5023923444976076,
"grad_norm": 0.7188005672009754,
"learning_rate": 5.726796679015392e-06,
"loss": 1.3365,
"num_tokens": 42694348.0,
"step": 210
},
{
"epoch": 0.5047846889952153,
"grad_norm": 0.5851351897769017,
"learning_rate": 5.691927981341488e-06,
"loss": 1.2529,
"num_tokens": 42927824.0,
"step": 211
},
{
"epoch": 0.507177033492823,
"grad_norm": 0.6489926998886038,
"learning_rate": 5.657047735161256e-06,
"loss": 1.2083,
"num_tokens": 43119784.0,
"step": 212
},
{
"epoch": 0.5095693779904307,
"grad_norm": 0.6806360393034492,
"learning_rate": 5.622158039255394e-06,
"loss": 1.2419,
"num_tokens": 43338927.0,
"step": 213
},
{
"epoch": 0.5119617224880383,
"grad_norm": 0.6900003239395834,
"learning_rate": 5.58726099297321e-06,
"loss": 1.3539,
"num_tokens": 43489368.0,
"step": 214
},
{
"epoch": 0.5143540669856459,
"grad_norm": 0.7208226618388429,
"learning_rate": 5.552358696106288e-06,
"loss": 1.0978,
"num_tokens": 43636289.0,
"step": 215
},
{
"epoch": 0.5167464114832536,
"grad_norm": 0.8421973876061544,
"learning_rate": 5.517453248762142e-06,
"loss": 1.2866,
"num_tokens": 43862748.0,
"step": 216
},
{
"epoch": 0.5191387559808612,
"grad_norm": 0.6260067887308286,
"learning_rate": 5.482546751237859e-06,
"loss": 1.3206,
"num_tokens": 44060052.0,
"step": 217
},
{
"epoch": 0.5215311004784688,
"grad_norm": 0.5760944390891047,
"learning_rate": 5.447641303893715e-06,
"loss": 1.439,
"num_tokens": 44297069.0,
"step": 218
},
{
"epoch": 0.5239234449760766,
"grad_norm": 0.61734789956525,
"learning_rate": 5.412739007026791e-06,
"loss": 1.4205,
"num_tokens": 44490168.0,
"step": 219
},
{
"epoch": 0.5263157894736842,
"grad_norm": 0.6257750740469292,
"learning_rate": 5.377841960744607e-06,
"loss": 1.3976,
"num_tokens": 44704452.0,
"step": 220
},
{
"epoch": 0.5287081339712919,
"grad_norm": 0.5344707538543756,
"learning_rate": 5.342952264838748e-06,
"loss": 1.2854,
"num_tokens": 44963417.0,
"step": 221
},
{
"epoch": 0.5311004784688995,
"grad_norm": 0.7495273348676206,
"learning_rate": 5.308072018658512e-06,
"loss": 1.041,
"num_tokens": 45085562.0,
"step": 222
},
{
"epoch": 0.5334928229665071,
"grad_norm": 0.6174774052991956,
"learning_rate": 5.273203320984611e-06,
"loss": 1.3262,
"num_tokens": 45278857.0,
"step": 223
},
{
"epoch": 0.5358851674641149,
"grad_norm": 0.5347084709996872,
"learning_rate": 5.23834826990286e-06,
"loss": 1.2666,
"num_tokens": 45496428.0,
"step": 224
},
{
"epoch": 0.5382775119617225,
"grad_norm": 0.537469270834151,
"learning_rate": 5.203508962677947e-06,
"loss": 1.1741,
"num_tokens": 45712553.0,
"step": 225
},
{
"epoch": 0.5406698564593302,
"grad_norm": 0.6378408098836623,
"learning_rate": 5.168687495627239e-06,
"loss": 1.1603,
"num_tokens": 45901658.0,
"step": 226
},
{
"epoch": 0.5430622009569378,
"grad_norm": 0.6300812904077526,
"learning_rate": 5.1338859639946396e-06,
"loss": 1.3905,
"num_tokens": 46088856.0,
"step": 227
},
{
"epoch": 0.5454545454545454,
"grad_norm": 0.634911596104642,
"learning_rate": 5.099106461824513e-06,
"loss": 1.2321,
"num_tokens": 46291266.0,
"step": 228
},
{
"epoch": 0.5478468899521531,
"grad_norm": 0.6926590384685983,
"learning_rate": 5.064351081835695e-06,
"loss": 1.1985,
"num_tokens": 46454514.0,
"step": 229
},
{
"epoch": 0.5502392344497608,
"grad_norm": 0.646212813197843,
"learning_rate": 5.02962191529556e-06,
"loss": 1.162,
"num_tokens": 46613219.0,
"step": 230
},
{
"epoch": 0.5526315789473685,
"grad_norm": 0.6542386722720754,
"learning_rate": 4.9949210518941945e-06,
"loss": 1.4429,
"num_tokens": 46844393.0,
"step": 231
},
{
"epoch": 0.5550239234449761,
"grad_norm": 0.5729641542767329,
"learning_rate": 4.960250579618652e-06,
"loss": 1.3042,
"num_tokens": 47083488.0,
"step": 232
},
{
"epoch": 0.5574162679425837,
"grad_norm": 0.6558395223982498,
"learning_rate": 4.925612584627325e-06,
"loss": 1.4322,
"num_tokens": 47296155.0,
"step": 233
},
{
"epoch": 0.5598086124401914,
"grad_norm": 0.5293743728173484,
"learning_rate": 4.8910091511244115e-06,
"loss": 1.4863,
"num_tokens": 47513493.0,
"step": 234
},
{
"epoch": 0.562200956937799,
"grad_norm": 0.5943251941494672,
"learning_rate": 4.856442361234507e-06,
"loss": 1.4734,
"num_tokens": 47716889.0,
"step": 235
},
{
"epoch": 0.5645933014354066,
"grad_norm": 0.6271955944541076,
"learning_rate": 4.821914294877327e-06,
"loss": 1.4372,
"num_tokens": 47925440.0,
"step": 236
},
{
"epoch": 0.5669856459330144,
"grad_norm": 0.7789065855387718,
"learning_rate": 4.787427029642549e-06,
"loss": 1.0957,
"num_tokens": 48073033.0,
"step": 237
},
{
"epoch": 0.569377990430622,
"grad_norm": 0.6495651224848629,
"learning_rate": 4.752982640664804e-06,
"loss": 1.3268,
"num_tokens": 48248595.0,
"step": 238
},
{
"epoch": 0.5717703349282297,
"grad_norm": 0.6160766106630973,
"learning_rate": 4.718583200498814e-06,
"loss": 1.4054,
"num_tokens": 48424413.0,
"step": 239
},
{
"epoch": 0.5741626794258373,
"grad_norm": 0.623669847382596,
"learning_rate": 4.684230778994688e-06,
"loss": 1.3817,
"num_tokens": 48613957.0,
"step": 240
},
{
"epoch": 0.5765550239234449,
"grad_norm": 0.6760867003584786,
"learning_rate": 4.64992744317337e-06,
"loss": 1.4087,
"num_tokens": 48801450.0,
"step": 241
},
{
"epoch": 0.5789473684210527,
"grad_norm": 0.6726827041038008,
"learning_rate": 4.615675257102265e-06,
"loss": 1.3587,
"num_tokens": 48992762.0,
"step": 242
},
{
"epoch": 0.5813397129186603,
"grad_norm": 0.7586345648423406,
"learning_rate": 4.58147628177105e-06,
"loss": 1.405,
"num_tokens": 49206463.0,
"step": 243
},
{
"epoch": 0.583732057416268,
"grad_norm": 0.5748903626756076,
"learning_rate": 4.547332574967653e-06,
"loss": 1.2833,
"num_tokens": 49439116.0,
"step": 244
},
{
"epoch": 0.5861244019138756,
"grad_norm": 0.6459030659564946,
"learning_rate": 4.513246191154434e-06,
"loss": 1.3808,
"num_tokens": 49611111.0,
"step": 245
},
{
"epoch": 0.5885167464114832,
"grad_norm": 0.5897283591830758,
"learning_rate": 4.479219181344579e-06,
"loss": 1.414,
"num_tokens": 49808207.0,
"step": 246
},
{
"epoch": 0.5909090909090909,
"grad_norm": 0.5803885935290669,
"learning_rate": 4.44525359297867e-06,
"loss": 1.412,
"num_tokens": 50005343.0,
"step": 247
},
{
"epoch": 0.5933014354066986,
"grad_norm": 0.587903351707854,
"learning_rate": 4.4113514698014955e-06,
"loss": 1.3887,
"num_tokens": 50238374.0,
"step": 248
},
{
"epoch": 0.5956937799043063,
"grad_norm": 0.6203595406125716,
"learning_rate": 4.377514851739085e-06,
"loss": 1.2548,
"num_tokens": 50470788.0,
"step": 249
},
{
"epoch": 0.5980861244019139,
"grad_norm": 0.5500030062641514,
"learning_rate": 4.3437457747759515e-06,
"loss": 1.2764,
"num_tokens": 50686198.0,
"step": 250
},
{
"epoch": 0.6004784688995215,
"grad_norm": 0.7362861850087726,
"learning_rate": 4.310046270832592e-06,
"loss": 1.2957,
"num_tokens": 50846591.0,
"step": 251
},
{
"epoch": 0.6028708133971292,
"grad_norm": 0.69063454686759,
"learning_rate": 4.276418367643218e-06,
"loss": 1.2816,
"num_tokens": 51043154.0,
"step": 252
},
{
"epoch": 0.6052631578947368,
"grad_norm": 0.8279823527287432,
"learning_rate": 4.242864088633762e-06,
"loss": 1.2845,
"num_tokens": 51276675.0,
"step": 253
},
{
"epoch": 0.6076555023923444,
"grad_norm": 0.5263711391152379,
"learning_rate": 4.2093854528000955e-06,
"loss": 1.2058,
"num_tokens": 51505587.0,
"step": 254
},
{
"epoch": 0.6100478468899522,
"grad_norm": 0.6716320775000435,
"learning_rate": 4.175984474586572e-06,
"loss": 1.1494,
"num_tokens": 51659012.0,
"step": 255
},
{
"epoch": 0.6124401913875598,
"grad_norm": 0.584427424031427,
"learning_rate": 4.142663163764806e-06,
"loss": 1.4945,
"num_tokens": 51883796.0,
"step": 256
},
{
"epoch": 0.6148325358851675,
"grad_norm": 0.5336467093570565,
"learning_rate": 4.109423525312738e-06,
"loss": 1.4456,
"num_tokens": 52116569.0,
"step": 257
},
{
"epoch": 0.6172248803827751,
"grad_norm": 0.7082487916003727,
"learning_rate": 4.076267559293996e-06,
"loss": 1.2521,
"num_tokens": 52291317.0,
"step": 258
},
{
"epoch": 0.6196172248803827,
"grad_norm": 0.670162970807308,
"learning_rate": 4.043197260737556e-06,
"loss": 1.2754,
"num_tokens": 52459579.0,
"step": 259
},
{
"epoch": 0.6220095693779905,
"grad_norm": 0.6217645411272703,
"learning_rate": 4.0102146195176895e-06,
"loss": 0.9781,
"num_tokens": 52655741.0,
"step": 260
},
{
"epoch": 0.6244019138755981,
"grad_norm": 0.6680531388656962,
"learning_rate": 3.977321620234236e-06,
"loss": 1.2224,
"num_tokens": 52855219.0,
"step": 261
},
{
"epoch": 0.6267942583732058,
"grad_norm": 0.6570258266897226,
"learning_rate": 3.944520242093186e-06,
"loss": 1.0444,
"num_tokens": 53022058.0,
"step": 262
},
{
"epoch": 0.6291866028708134,
"grad_norm": 0.569909382172942,
"learning_rate": 3.911812458787592e-06,
"loss": 1.2736,
"num_tokens": 53260177.0,
"step": 263
},
{
"epoch": 0.631578947368421,
"grad_norm": 0.68299256588729,
"learning_rate": 3.8792002383788044e-06,
"loss": 1.1374,
"num_tokens": 53406591.0,
"step": 264
},
{
"epoch": 0.6339712918660287,
"grad_norm": 0.6110632336195695,
"learning_rate": 3.846685543178058e-06,
"loss": 1.3974,
"num_tokens": 53588048.0,
"step": 265
},
{
"epoch": 0.6363636363636364,
"grad_norm": 0.66529312464438,
"learning_rate": 3.8142703296283954e-06,
"loss": 1.1257,
"num_tokens": 53773418.0,
"step": 266
},
{
"epoch": 0.638755980861244,
"grad_norm": 0.7331354863770742,
"learning_rate": 3.7819565481869426e-06,
"loss": 1.135,
"num_tokens": 53949544.0,
"step": 267
},
{
"epoch": 0.6411483253588517,
"grad_norm": 0.691168541736748,
"learning_rate": 3.7497461432075477e-06,
"loss": 1.2486,
"num_tokens": 54137216.0,
"step": 268
},
{
"epoch": 0.6435406698564593,
"grad_norm": 0.640130099494791,
"learning_rate": 3.717641052823795e-06,
"loss": 1.0805,
"num_tokens": 54340688.0,
"step": 269
},
{
"epoch": 0.645933014354067,
"grad_norm": 0.5598721395410986,
"learning_rate": 3.6856432088323746e-06,
"loss": 1.3979,
"num_tokens": 54537725.0,
"step": 270
},
{
"epoch": 0.6483253588516746,
"grad_norm": 0.6272293974276336,
"learning_rate": 3.6537545365768543e-06,
"loss": 1.3884,
"num_tokens": 54744185.0,
"step": 271
},
{
"epoch": 0.6507177033492823,
"grad_norm": 0.6812681210406815,
"learning_rate": 3.6219769548318205e-06,
"loss": 1.2305,
"num_tokens": 54944311.0,
"step": 272
},
{
"epoch": 0.65311004784689,
"grad_norm": 0.5952820749936442,
"learning_rate": 3.5903123756874315e-06,
"loss": 1.3444,
"num_tokens": 55150654.0,
"step": 273
},
{
"epoch": 0.6555023923444976,
"grad_norm": 0.7839097202629515,
"learning_rate": 3.558762704434361e-06,
"loss": 1.1151,
"num_tokens": 55310137.0,
"step": 274
},
{
"epoch": 0.6578947368421053,
"grad_norm": 0.5761324938652522,
"learning_rate": 3.527329839449152e-06,
"loss": 1.5347,
"num_tokens": 55545292.0,
"step": 275
},
{
"epoch": 0.6602870813397129,
"grad_norm": 0.6610740055714768,
"learning_rate": 3.496015672079998e-06,
"loss": 1.1698,
"num_tokens": 55736137.0,
"step": 276
},
{
"epoch": 0.6626794258373205,
"grad_norm": 0.5351233279220332,
"learning_rate": 3.4648220865329312e-06,
"loss": 1.4086,
"num_tokens": 55973020.0,
"step": 277
},
{
"epoch": 0.6650717703349283,
"grad_norm": 0.5934609597098215,
"learning_rate": 3.4337509597584466e-06,
"loss": 1.2519,
"num_tokens": 56193548.0,
"step": 278
},
{
"epoch": 0.6674641148325359,
"grad_norm": 0.6315221359405954,
"learning_rate": 3.402804161338577e-06,
"loss": 1.4212,
"num_tokens": 56399279.0,
"step": 279
},
{
"epoch": 0.6698564593301436,
"grad_norm": 0.5278279782038036,
"learning_rate": 3.371983553374375e-06,
"loss": 1.476,
"num_tokens": 56627906.0,
"step": 280
},
{
"epoch": 0.6722488038277512,
"grad_norm": 0.576586533979081,
"learning_rate": 3.3412909903738937e-06,
"loss": 1.2902,
"num_tokens": 56809932.0,
"step": 281
},
{
"epoch": 0.6746411483253588,
"grad_norm": 0.6467781327397376,
"learning_rate": 3.310728319140581e-06,
"loss": 1.2593,
"num_tokens": 56984901.0,
"step": 282
},
{
"epoch": 0.6770334928229665,
"grad_norm": 0.5025331960324733,
"learning_rate": 3.2802973786621665e-06,
"loss": 1.3005,
"num_tokens": 57203056.0,
"step": 283
},
{
"epoch": 0.6794258373205742,
"grad_norm": 0.6049658416582278,
"learning_rate": 3.2500000000000015e-06,
"loss": 1.2841,
"num_tokens": 57416534.0,
"step": 284
},
{
"epoch": 0.6818181818181818,
"grad_norm": 0.6356567810366318,
"learning_rate": 3.2198380061788803e-06,
"loss": 1.35,
"num_tokens": 57634005.0,
"step": 285
},
{
"epoch": 0.6842105263157895,
"grad_norm": 0.6259305842227357,
"learning_rate": 3.1898132120773566e-06,
"loss": 1.1986,
"num_tokens": 57820926.0,
"step": 286
},
{
"epoch": 0.6866028708133971,
"grad_norm": 0.5597883896460356,
"learning_rate": 3.1599274243185314e-06,
"loss": 1.4769,
"num_tokens": 58055489.0,
"step": 287
},
{
"epoch": 0.6889952153110048,
"grad_norm": 0.6331651732983823,
"learning_rate": 3.1301824411613473e-06,
"loss": 1.2765,
"num_tokens": 58245472.0,
"step": 288
},
{
"epoch": 0.6913875598086124,
"grad_norm": 0.6737504201758567,
"learning_rate": 3.1005800523923906e-06,
"loss": 1.1567,
"num_tokens": 58473327.0,
"step": 289
},
{
"epoch": 0.69377990430622,
"grad_norm": 0.8142514854229884,
"learning_rate": 3.071122039218194e-06,
"loss": 1.3968,
"num_tokens": 58649683.0,
"step": 290
},
{
"epoch": 0.6961722488038278,
"grad_norm": 0.5368448130491544,
"learning_rate": 3.0418101741580586e-06,
"loss": 1.2969,
"num_tokens": 58868455.0,
"step": 291
},
{
"epoch": 0.6985645933014354,
"grad_norm": 0.6697873240820909,
"learning_rate": 3.012646220937403e-06,
"loss": 1.3331,
"num_tokens": 59087101.0,
"step": 292
},
{
"epoch": 0.7009569377990431,
"grad_norm": 0.6546487553486491,
"learning_rate": 2.98363193438164e-06,
"loss": 1.3721,
"num_tokens": 59275075.0,
"step": 293
},
{
"epoch": 0.7033492822966507,
"grad_norm": 0.5913784460775687,
"learning_rate": 2.9547690603105774e-06,
"loss": 1.3077,
"num_tokens": 59490836.0,
"step": 294
},
{
"epoch": 0.7057416267942583,
"grad_norm": 1.0971211488496566,
"learning_rate": 2.926059335433378e-06,
"loss": 1.3629,
"num_tokens": 59688459.0,
"step": 295
},
{
"epoch": 0.7081339712918661,
"grad_norm": 0.9520603620495983,
"learning_rate": 2.897504487244061e-06,
"loss": 1.1589,
"num_tokens": 59854480.0,
"step": 296
},
{
"epoch": 0.7105263157894737,
"grad_norm": 0.6060298756711499,
"learning_rate": 2.8691062339175512e-06,
"loss": 1.3453,
"num_tokens": 60038638.0,
"step": 297
},
{
"epoch": 0.7129186602870813,
"grad_norm": 0.6435411063885081,
"learning_rate": 2.8408662842063002e-06,
"loss": 1.278,
"num_tokens": 60203017.0,
"step": 298
},
{
"epoch": 0.715311004784689,
"grad_norm": 0.6129870408682873,
"learning_rate": 2.8127863373374637e-06,
"loss": 1.3159,
"num_tokens": 60465940.0,
"step": 299
},
{
"epoch": 0.7177033492822966,
"grad_norm": 0.7588206471875415,
"learning_rate": 2.7848680829106602e-06,
"loss": 1.3377,
"num_tokens": 60632204.0,
"step": 300
},
{
"epoch": 0.7200956937799043,
"grad_norm": 0.6755921460278633,
"learning_rate": 2.7571132007963074e-06,
"loss": 1.3766,
"num_tokens": 60862058.0,
"step": 301
},
{
"epoch": 0.722488038277512,
"grad_norm": 0.7825671027002948,
"learning_rate": 2.7295233610345384e-06,
"loss": 1.4489,
"num_tokens": 61079432.0,
"step": 302
},
{
"epoch": 0.7248803827751196,
"grad_norm": 0.7227060900229081,
"learning_rate": 2.7021002237347206e-06,
"loss": 1.2062,
"num_tokens": 61255175.0,
"step": 303
},
{
"epoch": 0.7272727272727273,
"grad_norm": 0.5029929904055921,
"learning_rate": 2.6748454389755576e-06,
"loss": 1.4672,
"num_tokens": 61519012.0,
"step": 304
},
{
"epoch": 0.7296650717703349,
"grad_norm": 0.5685050474018025,
"learning_rate": 2.647760646705804e-06,
"loss": 1.2661,
"num_tokens": 61737384.0,
"step": 305
},
{
"epoch": 0.7320574162679426,
"grad_norm": 0.5737444678346549,
"learning_rate": 2.620847476645594e-06,
"loss": 1.3634,
"num_tokens": 62014122.0,
"step": 306
},
{
"epoch": 0.7344497607655502,
"grad_norm": 0.5367398412947724,
"learning_rate": 2.5941075481883705e-06,
"loss": 1.2674,
"num_tokens": 62246863.0,
"step": 307
},
{
"epoch": 0.7368421052631579,
"grad_norm": 0.6243016009006609,
"learning_rate": 2.567542470303452e-06,
"loss": 1.168,
"num_tokens": 62409808.0,
"step": 308
},
{
"epoch": 0.7392344497607656,
"grad_norm": 0.5971922768594424,
"learning_rate": 2.5411538414392146e-06,
"loss": 1.2759,
"num_tokens": 62601276.0,
"step": 309
},
{
"epoch": 0.7416267942583732,
"grad_norm": 0.6815558823471888,
"learning_rate": 2.5149432494269134e-06,
"loss": 1.2409,
"num_tokens": 62811825.0,
"step": 310
},
{
"epoch": 0.7440191387559809,
"grad_norm": 0.5840128801515716,
"learning_rate": 2.4889122713851397e-06,
"loss": 1.3169,
"num_tokens": 63033086.0,
"step": 311
},
{
"epoch": 0.7464114832535885,
"grad_norm": 0.6273197788202933,
"learning_rate": 2.463062473624927e-06,
"loss": 1.3796,
"num_tokens": 63231740.0,
"step": 312
},
{
"epoch": 0.7488038277511961,
"grad_norm": 0.5265456617487613,
"learning_rate": 2.437395411555504e-06,
"loss": 1.5587,
"num_tokens": 63484868.0,
"step": 313
},
{
"epoch": 0.7511961722488039,
"grad_norm": 0.63416554890546,
"learning_rate": 2.4119126295906997e-06,
"loss": 1.337,
"num_tokens": 63670016.0,
"step": 314
},
{
"epoch": 0.7535885167464115,
"grad_norm": 0.5715322993600752,
"learning_rate": 2.3866156610560186e-06,
"loss": 1.3791,
"num_tokens": 63908153.0,
"step": 315
},
{
"epoch": 0.7559808612440191,
"grad_norm": 0.5352522962690041,
"learning_rate": 2.3615060280963797e-06,
"loss": 1.3869,
"num_tokens": 64124869.0,
"step": 316
},
{
"epoch": 0.7583732057416268,
"grad_norm": 0.6645309100106329,
"learning_rate": 2.3365852415845225e-06,
"loss": 1.1748,
"num_tokens": 64294032.0,
"step": 317
},
{
"epoch": 0.7607655502392344,
"grad_norm": 0.638253408315402,
"learning_rate": 2.3118548010301015e-06,
"loss": 1.3481,
"num_tokens": 64476190.0,
"step": 318
},
{
"epoch": 0.7631578947368421,
"grad_norm": 0.6354512288625345,
"learning_rate": 2.2873161944894552e-06,
"loss": 1.2763,
"num_tokens": 64648365.0,
"step": 319
},
{
"epoch": 0.7655502392344498,
"grad_norm": 0.6556279261993273,
"learning_rate": 2.262970898476071e-06,
"loss": 1.1877,
"num_tokens": 64835000.0,
"step": 320
},
{
"epoch": 0.7679425837320574,
"grad_norm": 0.5810049291016075,
"learning_rate": 2.2388203778717407e-06,
"loss": 1.2494,
"num_tokens": 65026464.0,
"step": 321
},
{
"epoch": 0.7703349282296651,
"grad_norm": 0.6418564147429942,
"learning_rate": 2.2148660858384147e-06,
"loss": 1.2868,
"num_tokens": 65251252.0,
"step": 322
},
{
"epoch": 0.7727272727272727,
"grad_norm": 0.48981392426231535,
"learning_rate": 2.1911094637307715e-06,
"loss": 1.3262,
"num_tokens": 65517715.0,
"step": 323
},
{
"epoch": 0.7751196172248804,
"grad_norm": 0.570398102904161,
"learning_rate": 2.1675519410094803e-06,
"loss": 1.2075,
"num_tokens": 65701842.0,
"step": 324
},
{
"epoch": 0.777511961722488,
"grad_norm": 0.718887759011956,
"learning_rate": 2.144194935155192e-06,
"loss": 1.0361,
"num_tokens": 65890598.0,
"step": 325
},
{
"epoch": 0.7799043062200957,
"grad_norm": 0.5958578032562041,
"learning_rate": 2.121039851583254e-06,
"loss": 1.1784,
"num_tokens": 66089193.0,
"step": 326
},
{
"epoch": 0.7822966507177034,
"grad_norm": 0.6321165266280917,
"learning_rate": 2.098088083559135e-06,
"loss": 1.4724,
"num_tokens": 66314970.0,
"step": 327
},
{
"epoch": 0.784688995215311,
"grad_norm": 0.7307933906135672,
"learning_rate": 2.0753410121145984e-06,
"loss": 1.3178,
"num_tokens": 66480313.0,
"step": 328
},
{
"epoch": 0.7870813397129187,
"grad_norm": 0.5974020419425694,
"learning_rate": 2.0528000059646e-06,
"loss": 1.319,
"num_tokens": 66682404.0,
"step": 329
},
{
"epoch": 0.7894736842105263,
"grad_norm": 0.6678260669267934,
"learning_rate": 2.0304664214249326e-06,
"loss": 1.1726,
"num_tokens": 66883951.0,
"step": 330
},
{
"epoch": 0.7918660287081339,
"grad_norm": 0.6478957893104581,
"learning_rate": 2.0083416023306163e-06,
"loss": 1.3654,
"num_tokens": 67061851.0,
"step": 331
},
{
"epoch": 0.7942583732057417,
"grad_norm": 0.6407419233083412,
"learning_rate": 1.986426879955034e-06,
"loss": 1.1735,
"num_tokens": 67244214.0,
"step": 332
},
{
"epoch": 0.7966507177033493,
"grad_norm": 0.5771873678756463,
"learning_rate": 1.9647235729298346e-06,
"loss": 1.2519,
"num_tokens": 67423338.0,
"step": 333
},
{
"epoch": 0.7990430622009569,
"grad_norm": 0.5247220569941472,
"learning_rate": 1.9432329871655837e-06,
"loss": 1.3961,
"num_tokens": 67651988.0,
"step": 334
},
{
"epoch": 0.8014354066985646,
"grad_norm": 0.5562304496267458,
"learning_rate": 1.9219564157731848e-06,
"loss": 1.3009,
"num_tokens": 67899296.0,
"step": 335
},
{
"epoch": 0.8038277511961722,
"grad_norm": 0.6411488634701462,
"learning_rate": 1.9008951389860785e-06,
"loss": 1.1231,
"num_tokens": 68079644.0,
"step": 336
},
{
"epoch": 0.80622009569378,
"grad_norm": 0.5322759860676641,
"learning_rate": 1.8800504240832012e-06,
"loss": 1.3515,
"num_tokens": 68293723.0,
"step": 337
},
{
"epoch": 0.8086124401913876,
"grad_norm": 0.5746223661246294,
"learning_rate": 1.8594235253127373e-06,
"loss": 1.2591,
"num_tokens": 68529723.0,
"step": 338
},
{
"epoch": 0.8110047846889952,
"grad_norm": 0.7085560876362006,
"learning_rate": 1.8390156838166464e-06,
"loss": 1.1149,
"num_tokens": 68705404.0,
"step": 339
},
{
"epoch": 0.8133971291866029,
"grad_norm": 0.5845527495613347,
"learning_rate": 1.8188281275559866e-06,
"loss": 1.2183,
"num_tokens": 68954531.0,
"step": 340
},
{
"epoch": 0.8157894736842105,
"grad_norm": 0.5202134256783918,
"learning_rate": 1.7988620712370197e-06,
"loss": 1.4849,
"num_tokens": 69236634.0,
"step": 341
},
{
"epoch": 0.8181818181818182,
"grad_norm": 0.5645505834969143,
"learning_rate": 1.7791187162381325e-06,
"loss": 1.4002,
"num_tokens": 69451514.0,
"step": 342
},
{
"epoch": 0.8205741626794258,
"grad_norm": 0.5607875212664545,
"learning_rate": 1.759599250537534e-06,
"loss": 1.3259,
"num_tokens": 69682913.0,
"step": 343
},
{
"epoch": 0.8229665071770335,
"grad_norm": 0.559929672578073,
"learning_rate": 1.740304848641787e-06,
"loss": 1.2551,
"num_tokens": 69901240.0,
"step": 344
},
{
"epoch": 0.8253588516746412,
"grad_norm": 0.6095925983926321,
"learning_rate": 1.7212366715151263e-06,
"loss": 1.287,
"num_tokens": 70116810.0,
"step": 345
},
{
"epoch": 0.8277511961722488,
"grad_norm": 0.6240251911568065,
"learning_rate": 1.702395866509612e-06,
"loss": 1.1325,
"num_tokens": 70301449.0,
"step": 346
},
{
"epoch": 0.8301435406698564,
"grad_norm": 0.5784795095657023,
"learning_rate": 1.6837835672960834e-06,
"loss": 1.2059,
"num_tokens": 70492602.0,
"step": 347
},
{
"epoch": 0.8325358851674641,
"grad_norm": 0.6504184788969882,
"learning_rate": 1.6654008937959498e-06,
"loss": 0.9334,
"num_tokens": 70656041.0,
"step": 348
},
{
"epoch": 0.8349282296650717,
"grad_norm": 0.5702583567689546,
"learning_rate": 1.6472489521138016e-06,
"loss": 1.2118,
"num_tokens": 70905608.0,
"step": 349
},
{
"epoch": 0.8373205741626795,
"grad_norm": 0.6274124275648116,
"learning_rate": 1.629328834470857e-06,
"loss": 1.1815,
"num_tokens": 71101191.0,
"step": 350
},
{
"epoch": 0.8397129186602871,
"grad_norm": 0.6033829272863582,
"learning_rate": 1.611641619139238e-06,
"loss": 1.2807,
"num_tokens": 71291926.0,
"step": 351
},
{
"epoch": 0.8421052631578947,
"grad_norm": 0.6273406564056667,
"learning_rate": 1.5941883703770968e-06,
"loss": 1.3962,
"num_tokens": 71533733.0,
"step": 352
},
{
"epoch": 0.8444976076555024,
"grad_norm": 0.5312463864078979,
"learning_rate": 1.57697013836457e-06,
"loss": 1.3811,
"num_tokens": 71747066.0,
"step": 353
},
{
"epoch": 0.84688995215311,
"grad_norm": 0.7082921095962508,
"learning_rate": 1.5599879591405917e-06,
"loss": 1.1701,
"num_tokens": 71900238.0,
"step": 354
},
{
"epoch": 0.8492822966507177,
"grad_norm": 0.6134430059504471,
"learning_rate": 1.5432428545405554e-06,
"loss": 1.2446,
"num_tokens": 72085575.0,
"step": 355
},
{
"epoch": 0.8516746411483254,
"grad_norm": 0.6204954269243784,
"learning_rate": 1.526735832134829e-06,
"loss": 1.1297,
"num_tokens": 72322159.0,
"step": 356
},
{
"epoch": 0.854066985645933,
"grad_norm": 0.565525186855862,
"learning_rate": 1.5104678851681253e-06,
"loss": 1.4408,
"num_tokens": 72528384.0,
"step": 357
},
{
"epoch": 0.8564593301435407,
"grad_norm": 0.5682317963462433,
"learning_rate": 1.4944399924997372e-06,
"loss": 1.3499,
"num_tokens": 72730257.0,
"step": 358
},
{
"epoch": 0.8588516746411483,
"grad_norm": 0.5129599811585936,
"learning_rate": 1.4786531185446455e-06,
"loss": 1.2999,
"num_tokens": 72969626.0,
"step": 359
},
{
"epoch": 0.861244019138756,
"grad_norm": 0.6444665971353251,
"learning_rate": 1.4631082132154806e-06,
"loss": 1.2785,
"num_tokens": 73149240.0,
"step": 360
},
{
"epoch": 0.8636363636363636,
"grad_norm": 0.5547734172552142,
"learning_rate": 1.4478062118653703e-06,
"loss": 1.4096,
"num_tokens": 73363813.0,
"step": 361
},
{
"epoch": 0.8660287081339713,
"grad_norm": 0.6371851858022577,
"learning_rate": 1.4327480352316581e-06,
"loss": 1.2693,
"num_tokens": 73551589.0,
"step": 362
},
{
"epoch": 0.868421052631579,
"grad_norm": 0.5546864610023559,
"learning_rate": 1.417934589380498e-06,
"loss": 1.403,
"num_tokens": 73790910.0,
"step": 363
},
{
"epoch": 0.8708133971291866,
"grad_norm": 0.6219072093620266,
"learning_rate": 1.4033667656523405e-06,
"loss": 1.2085,
"num_tokens": 73985624.0,
"step": 364
},
{
"epoch": 0.8732057416267942,
"grad_norm": 0.6235130422407937,
"learning_rate": 1.389045440608296e-06,
"loss": 1.0986,
"num_tokens": 74148795.0,
"step": 365
},
{
"epoch": 0.8755980861244019,
"grad_norm": 0.6006216529271874,
"learning_rate": 1.374971475977394e-06,
"loss": 1.3025,
"num_tokens": 74332841.0,
"step": 366
},
{
"epoch": 0.8779904306220095,
"grad_norm": 0.6404509379440522,
"learning_rate": 1.361145718604731e-06,
"loss": 1.2766,
"num_tokens": 74529785.0,
"step": 367
},
{
"epoch": 0.8803827751196173,
"grad_norm": 0.686225898948988,
"learning_rate": 1.3475690004005098e-06,
"loss": 1.2452,
"num_tokens": 74703039.0,
"step": 368
},
{
"epoch": 0.8827751196172249,
"grad_norm": 0.571283865051148,
"learning_rate": 1.3342421382899936e-06,
"loss": 1.2626,
"num_tokens": 74883297.0,
"step": 369
},
{
"epoch": 0.8851674641148325,
"grad_norm": 0.7382176987149066,
"learning_rate": 1.3211659341643412e-06,
"loss": 1.233,
"num_tokens": 75066196.0,
"step": 370
},
{
"epoch": 0.8875598086124402,
"grad_norm": 0.6611644800778096,
"learning_rate": 1.308341174832359e-06,
"loss": 1.1876,
"num_tokens": 75232348.0,
"step": 371
},
{
"epoch": 0.8899521531100478,
"grad_norm": 0.5130766984472136,
"learning_rate": 1.2957686319731623e-06,
"loss": 1.2034,
"num_tokens": 75458529.0,
"step": 372
},
{
"epoch": 0.8923444976076556,
"grad_norm": 0.5684819034489474,
"learning_rate": 1.2834490620897342e-06,
"loss": 1.1584,
"num_tokens": 75661699.0,
"step": 373
},
{
"epoch": 0.8947368421052632,
"grad_norm": 0.5422081420273679,
"learning_rate": 1.2713832064634127e-06,
"loss": 1.3424,
"num_tokens": 75916455.0,
"step": 374
},
{
"epoch": 0.8971291866028708,
"grad_norm": 0.617788912813985,
"learning_rate": 1.259571791109285e-06,
"loss": 1.3227,
"num_tokens": 76100165.0,
"step": 375
},
{
"epoch": 0.8995215311004785,
"grad_norm": 0.5559640103832523,
"learning_rate": 1.2480155267325039e-06,
"loss": 1.1962,
"num_tokens": 76326481.0,
"step": 376
},
{
"epoch": 0.9019138755980861,
"grad_norm": 0.5930770090555496,
"learning_rate": 1.2367151086855187e-06,
"loss": 1.2261,
"num_tokens": 76482451.0,
"step": 377
},
{
"epoch": 0.9043062200956937,
"grad_norm": 0.6461043468488586,
"learning_rate": 1.2256712169262415e-06,
"loss": 1.3309,
"num_tokens": 76692581.0,
"step": 378
},
{
"epoch": 0.9066985645933014,
"grad_norm": 0.6295579626427625,
"learning_rate": 1.2148845159771311e-06,
"loss": 1.0403,
"num_tokens": 76869394.0,
"step": 379
},
{
"epoch": 0.9090909090909091,
"grad_norm": 0.6317114832123317,
"learning_rate": 1.2043556548852065e-06,
"loss": 1.3877,
"num_tokens": 77057755.0,
"step": 380
},
{
"epoch": 0.9114832535885168,
"grad_norm": 0.5646978093938921,
"learning_rate": 1.1940852671829938e-06,
"loss": 1.1932,
"num_tokens": 77279900.0,
"step": 381
},
{
"epoch": 0.9138755980861244,
"grad_norm": 0.554856696245118,
"learning_rate": 1.184073970850408e-06,
"loss": 1.3931,
"num_tokens": 77474484.0,
"step": 382
},
{
"epoch": 0.916267942583732,
"grad_norm": 0.5125502030227311,
"learning_rate": 1.174322368277565e-06,
"loss": 1.5298,
"num_tokens": 77739918.0,
"step": 383
},
{
"epoch": 0.9186602870813397,
"grad_norm": 0.6245967453038902,
"learning_rate": 1.1648310462285386e-06,
"loss": 1.2966,
"num_tokens": 77876514.0,
"step": 384
},
{
"epoch": 0.9210526315789473,
"grad_norm": 0.510131355499661,
"learning_rate": 1.1556005758060517e-06,
"loss": 1.3328,
"num_tokens": 78102264.0,
"step": 385
},
{
"epoch": 0.9234449760765551,
"grad_norm": 0.5313647688128483,
"learning_rate": 1.146631512417113e-06,
"loss": 1.4715,
"num_tokens": 78336063.0,
"step": 386
},
{
"epoch": 0.9258373205741627,
"grad_norm": 0.5521683071808053,
"learning_rate": 1.1379243957395987e-06,
"loss": 1.231,
"num_tokens": 78529771.0,
"step": 387
},
{
"epoch": 0.9282296650717703,
"grad_norm": 0.7086439907733622,
"learning_rate": 1.1294797496897786e-06,
"loss": 1.1614,
"num_tokens": 78679587.0,
"step": 388
},
{
"epoch": 0.930622009569378,
"grad_norm": 0.5802516939308205,
"learning_rate": 1.121298082390793e-06,
"loss": 1.3184,
"num_tokens": 78878694.0,
"step": 389
},
{
"epoch": 0.9330143540669856,
"grad_norm": 0.5736760000180704,
"learning_rate": 1.113379886142075e-06,
"loss": 1.3361,
"num_tokens": 79073779.0,
"step": 390
},
{
"epoch": 0.9354066985645934,
"grad_norm": 0.5432525121846279,
"learning_rate": 1.105725637389732e-06,
"loss": 1.2006,
"num_tokens": 79270679.0,
"step": 391
},
{
"epoch": 0.937799043062201,
"grad_norm": 0.6631735593901804,
"learning_rate": 1.0983357966978747e-06,
"loss": 1.4223,
"num_tokens": 79451866.0,
"step": 392
},
{
"epoch": 0.9401913875598086,
"grad_norm": 0.5833059272480773,
"learning_rate": 1.0912108087209075e-06,
"loss": 1.3483,
"num_tokens": 79629121.0,
"step": 393
},
{
"epoch": 0.9425837320574163,
"grad_norm": 0.521975187666564,
"learning_rate": 1.084351102176769e-06,
"loss": 1.3392,
"num_tokens": 79864771.0,
"step": 394
},
{
"epoch": 0.9449760765550239,
"grad_norm": 0.5983490163395889,
"learning_rate": 1.0777570898211406e-06,
"loss": 1.138,
"num_tokens": 80090837.0,
"step": 395
},
{
"epoch": 0.9473684210526315,
"grad_norm": 0.6791397944349069,
"learning_rate": 1.0714291684226054e-06,
"loss": 1.1482,
"num_tokens": 80247684.0,
"step": 396
},
{
"epoch": 0.9497607655502392,
"grad_norm": 0.7135484583851197,
"learning_rate": 1.0653677187387787e-06,
"loss": 1.178,
"num_tokens": 80412689.0,
"step": 397
},
{
"epoch": 0.9521531100478469,
"grad_norm": 0.5467847425622099,
"learning_rate": 1.0595731054933937e-06,
"loss": 1.256,
"num_tokens": 80646002.0,
"step": 398
},
{
"epoch": 0.9545454545454546,
"grad_norm": 0.649264408997545,
"learning_rate": 1.0540456773543596e-06,
"loss": 1.2587,
"num_tokens": 80846542.0,
"step": 399
},
{
"epoch": 0.9569377990430622,
"grad_norm": 0.6408697258124432,
"learning_rate": 1.0487857669127782e-06,
"loss": 1.2991,
"num_tokens": 81039183.0,
"step": 400
},
{
"epoch": 0.9593301435406698,
"grad_norm": 0.6125041198323196,
"learning_rate": 1.0437936906629336e-06,
"loss": 1.2721,
"num_tokens": 81228745.0,
"step": 401
},
{
"epoch": 0.9617224880382775,
"grad_norm": 0.6078405250476319,
"learning_rate": 1.039069748983248e-06,
"loss": 1.3821,
"num_tokens": 81425019.0,
"step": 402
},
{
"epoch": 0.9641148325358851,
"grad_norm": 0.5595011118119477,
"learning_rate": 1.0346142261182064e-06,
"loss": 1.0514,
"num_tokens": 81634634.0,
"step": 403
},
{
"epoch": 0.9665071770334929,
"grad_norm": 0.4972617449675274,
"learning_rate": 1.0304273901612566e-06,
"loss": 1.341,
"num_tokens": 81852833.0,
"step": 404
},
{
"epoch": 0.9688995215311005,
"grad_norm": 0.5059294656620951,
"learning_rate": 1.0265094930386741e-06,
"loss": 1.309,
"num_tokens": 82114913.0,
"step": 405
},
{
"epoch": 0.9712918660287081,
"grad_norm": 0.5962308760747792,
"learning_rate": 1.0228607704944048e-06,
"loss": 1.3641,
"num_tokens": 82299779.0,
"step": 406
},
{
"epoch": 0.9736842105263158,
"grad_norm": 0.6131816008639356,
"learning_rate": 1.0194814420758806e-06,
"loss": 1.1839,
"num_tokens": 82469216.0,
"step": 407
},
{
"epoch": 0.9760765550239234,
"grad_norm": 0.6115969172998058,
"learning_rate": 1.0163717111208086e-06,
"loss": 1.3046,
"num_tokens": 82640081.0,
"step": 408
},
{
"epoch": 0.9784688995215312,
"grad_norm": 0.5612129186921679,
"learning_rate": 1.0135317647449362e-06,
"loss": 1.4059,
"num_tokens": 82843981.0,
"step": 409
},
{
"epoch": 0.9808612440191388,
"grad_norm": 0.587067444004318,
"learning_rate": 1.0109617738307914e-06,
"loss": 1.2261,
"num_tokens": 83020444.0,
"step": 410
},
{
"epoch": 0.9832535885167464,
"grad_norm": 0.5594996533209011,
"learning_rate": 1.0086618930174011e-06,
"loss": 1.2042,
"num_tokens": 83232598.0,
"step": 411
},
{
"epoch": 0.9856459330143541,
"grad_norm": 0.7071205597794052,
"learning_rate": 1.006632260690988e-06,
"loss": 1.2169,
"num_tokens": 83374569.0,
"step": 412
},
{
"epoch": 0.9880382775119617,
"grad_norm": 0.6748664203646131,
"learning_rate": 1.0048729989766396e-06,
"loss": 1.1071,
"num_tokens": 83586085.0,
"step": 413
},
{
"epoch": 0.9904306220095693,
"grad_norm": 0.5910387645740945,
"learning_rate": 1.0033842137309649e-06,
"loss": 1.4125,
"num_tokens": 83786689.0,
"step": 414
},
{
"epoch": 0.992822966507177,
"grad_norm": 0.6362289532425913,
"learning_rate": 1.0021659945357202e-06,
"loss": 1.2213,
"num_tokens": 83967541.0,
"step": 415
},
{
"epoch": 0.9952153110047847,
"grad_norm": 0.5429466958407293,
"learning_rate": 1.0012184146924225e-06,
"loss": 1.3142,
"num_tokens": 84191970.0,
"step": 416
},
{
"epoch": 0.9976076555023924,
"grad_norm": 0.5637271505857214,
"learning_rate": 1.0005415312179367e-06,
"loss": 1.3643,
"num_tokens": 84430751.0,
"step": 417
},
{
"epoch": 1.0,
"grad_norm": 0.5655593189952651,
"learning_rate": 1.0001353848410461e-06,
"loss": 1.2425,
"num_tokens": 84668558.0,
"step": 418
},
{
"epoch": 1.0,
"eval_loss": 0.7911388278007507,
"eval_num_tokens": 84668558.0,
"eval_runtime": 47.4555,
"eval_samples_per_second": 62.627,
"eval_steps_per_second": 7.839,
"step": 418
},
{
"epoch": 1.0,
"step": 418,
"total_flos": 179772395159552.0,
"train_loss": 1.3804568826581873,
"train_runtime": 1345.8555,
"train_samples_per_second": 19.872,
"train_steps_per_second": 0.311
}
],
"logging_steps": 1,
"max_steps": 418,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 179772395159552.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}