{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.99712,
"eval_steps": 500,
"global_step": 1560,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00192,
"grad_norm": 3.654453992843628,
"learning_rate": 6.41025641025641e-08,
"loss": 0.4701,
"step": 1
},
{
"epoch": 0.00384,
"grad_norm": 3.9613118171691895,
"learning_rate": 1.282051282051282e-07,
"loss": 0.4673,
"step": 2
},
{
"epoch": 0.00576,
"grad_norm": 3.5779571533203125,
"learning_rate": 1.9230769230769234e-07,
"loss": 0.5032,
"step": 3
},
{
"epoch": 0.00768,
"grad_norm": 3.898608446121216,
"learning_rate": 2.564102564102564e-07,
"loss": 0.54,
"step": 4
},
{
"epoch": 0.0096,
"grad_norm": 3.6843831539154053,
"learning_rate": 3.205128205128205e-07,
"loss": 0.474,
"step": 5
},
{
"epoch": 0.01152,
"grad_norm": 3.858992576599121,
"learning_rate": 3.846153846153847e-07,
"loss": 0.5628,
"step": 6
},
{
"epoch": 0.01344,
"grad_norm": 3.64652419090271,
"learning_rate": 4.4871794871794876e-07,
"loss": 0.4501,
"step": 7
},
{
"epoch": 0.01536,
"grad_norm": 3.838735818862915,
"learning_rate": 5.128205128205128e-07,
"loss": 0.5109,
"step": 8
},
{
"epoch": 0.01728,
"grad_norm": 3.4110610485076904,
"learning_rate": 5.76923076923077e-07,
"loss": 0.4836,
"step": 9
},
{
"epoch": 0.0192,
"grad_norm": 3.511805295944214,
"learning_rate": 6.41025641025641e-07,
"loss": 0.539,
"step": 10
},
{
"epoch": 0.02112,
"grad_norm": 3.3596291542053223,
"learning_rate": 7.051282051282052e-07,
"loss": 0.5027,
"step": 11
},
{
"epoch": 0.02304,
"grad_norm": 3.387565851211548,
"learning_rate": 7.692307692307694e-07,
"loss": 0.5813,
"step": 12
},
{
"epoch": 0.02496,
"grad_norm": 2.8245911598205566,
"learning_rate": 8.333333333333333e-07,
"loss": 0.5209,
"step": 13
},
{
"epoch": 0.02688,
"grad_norm": 2.54447078704834,
"learning_rate": 8.974358974358975e-07,
"loss": 0.4601,
"step": 14
},
{
"epoch": 0.0288,
"grad_norm": 2.7078425884246826,
"learning_rate": 9.615384615384617e-07,
"loss": 0.4534,
"step": 15
},
{
"epoch": 0.03072,
"grad_norm": 2.5482826232910156,
"learning_rate": 1.0256410256410257e-06,
"loss": 0.4588,
"step": 16
},
{
"epoch": 0.03264,
"grad_norm": 2.3164541721343994,
"learning_rate": 1.0897435897435899e-06,
"loss": 0.441,
"step": 17
},
{
"epoch": 0.03456,
"grad_norm": 2.186650276184082,
"learning_rate": 1.153846153846154e-06,
"loss": 0.4739,
"step": 18
},
{
"epoch": 0.03648,
"grad_norm": 2.039811372756958,
"learning_rate": 1.217948717948718e-06,
"loss": 0.4986,
"step": 19
},
{
"epoch": 0.0384,
"grad_norm": 1.982841968536377,
"learning_rate": 1.282051282051282e-06,
"loss": 0.475,
"step": 20
},
{
"epoch": 0.04032,
"grad_norm": 1.8235762119293213,
"learning_rate": 1.3461538461538462e-06,
"loss": 0.4436,
"step": 21
},
{
"epoch": 0.04224,
"grad_norm": 1.9258081912994385,
"learning_rate": 1.4102564102564104e-06,
"loss": 0.4979,
"step": 22
},
{
"epoch": 0.04416,
"grad_norm": 1.723280906677246,
"learning_rate": 1.4743589743589745e-06,
"loss": 0.3601,
"step": 23
},
{
"epoch": 0.04608,
"grad_norm": 1.2363345623016357,
"learning_rate": 1.5384615384615387e-06,
"loss": 0.457,
"step": 24
},
{
"epoch": 0.048,
"grad_norm": 1.3034982681274414,
"learning_rate": 1.602564102564103e-06,
"loss": 0.4655,
"step": 25
},
{
"epoch": 0.04992,
"grad_norm": 1.2239552736282349,
"learning_rate": 1.6666666666666667e-06,
"loss": 0.3759,
"step": 26
},
{
"epoch": 0.05184,
"grad_norm": 1.295291781425476,
"learning_rate": 1.7307692307692308e-06,
"loss": 0.4673,
"step": 27
},
{
"epoch": 0.05376,
"grad_norm": 1.1645803451538086,
"learning_rate": 1.794871794871795e-06,
"loss": 0.4374,
"step": 28
},
{
"epoch": 0.05568,
"grad_norm": 1.1368969678878784,
"learning_rate": 1.8589743589743592e-06,
"loss": 0.4103,
"step": 29
},
{
"epoch": 0.0576,
"grad_norm": 0.9449895620346069,
"learning_rate": 1.9230769230769234e-06,
"loss": 0.3372,
"step": 30
},
{
"epoch": 0.05952,
"grad_norm": 1.033974289894104,
"learning_rate": 1.987179487179487e-06,
"loss": 0.3948,
"step": 31
},
{
"epoch": 0.06144,
"grad_norm": 0.8340185880661011,
"learning_rate": 2.0512820512820513e-06,
"loss": 0.3671,
"step": 32
},
{
"epoch": 0.06336,
"grad_norm": 0.8375274538993835,
"learning_rate": 2.1153846153846155e-06,
"loss": 0.34,
"step": 33
},
{
"epoch": 0.06528,
"grad_norm": 0.993920624256134,
"learning_rate": 2.1794871794871797e-06,
"loss": 0.3989,
"step": 34
},
{
"epoch": 0.0672,
"grad_norm": 0.9816155433654785,
"learning_rate": 2.243589743589744e-06,
"loss": 0.4788,
"step": 35
},
{
"epoch": 0.06912,
"grad_norm": 1.0060805082321167,
"learning_rate": 2.307692307692308e-06,
"loss": 0.4578,
"step": 36
},
{
"epoch": 0.07104,
"grad_norm": 0.9013523459434509,
"learning_rate": 2.371794871794872e-06,
"loss": 0.3578,
"step": 37
},
{
"epoch": 0.07296,
"grad_norm": 0.9355642795562744,
"learning_rate": 2.435897435897436e-06,
"loss": 0.4569,
"step": 38
},
{
"epoch": 0.07488,
"grad_norm": 0.861568808555603,
"learning_rate": 2.5e-06,
"loss": 0.3638,
"step": 39
},
{
"epoch": 0.0768,
"grad_norm": 0.8118944764137268,
"learning_rate": 2.564102564102564e-06,
"loss": 0.4376,
"step": 40
},
{
"epoch": 0.07872,
"grad_norm": 0.8716627359390259,
"learning_rate": 2.6282051282051286e-06,
"loss": 0.4433,
"step": 41
},
{
"epoch": 0.08064,
"grad_norm": 0.8564445376396179,
"learning_rate": 2.6923076923076923e-06,
"loss": 0.4378,
"step": 42
},
{
"epoch": 0.08256,
"grad_norm": 0.7043813467025757,
"learning_rate": 2.756410256410257e-06,
"loss": 0.3878,
"step": 43
},
{
"epoch": 0.08448,
"grad_norm": 0.8697289228439331,
"learning_rate": 2.8205128205128207e-06,
"loss": 0.4542,
"step": 44
},
{
"epoch": 0.0864,
"grad_norm": 0.7183628082275391,
"learning_rate": 2.8846153846153845e-06,
"loss": 0.3775,
"step": 45
},
{
"epoch": 0.08832,
"grad_norm": 0.7884376049041748,
"learning_rate": 2.948717948717949e-06,
"loss": 0.4123,
"step": 46
},
{
"epoch": 0.09024,
"grad_norm": 0.7274856567382812,
"learning_rate": 3.012820512820513e-06,
"loss": 0.35,
"step": 47
},
{
"epoch": 0.09216,
"grad_norm": 0.7103409171104431,
"learning_rate": 3.0769230769230774e-06,
"loss": 0.4492,
"step": 48
},
{
"epoch": 0.09408,
"grad_norm": 0.7372768521308899,
"learning_rate": 3.141025641025641e-06,
"loss": 0.3737,
"step": 49
},
{
"epoch": 0.096,
"grad_norm": 0.7264636158943176,
"learning_rate": 3.205128205128206e-06,
"loss": 0.3778,
"step": 50
},
{
"epoch": 0.09792,
"grad_norm": 0.7514247894287109,
"learning_rate": 3.2692307692307696e-06,
"loss": 0.3409,
"step": 51
},
{
"epoch": 0.09984,
"grad_norm": 0.6889472007751465,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.365,
"step": 52
},
{
"epoch": 0.10176,
"grad_norm": 0.740145742893219,
"learning_rate": 3.397435897435898e-06,
"loss": 0.3674,
"step": 53
},
{
"epoch": 0.10368,
"grad_norm": 0.7058961391448975,
"learning_rate": 3.4615384615384617e-06,
"loss": 0.402,
"step": 54
},
{
"epoch": 0.1056,
"grad_norm": 0.7002719044685364,
"learning_rate": 3.5256410256410263e-06,
"loss": 0.4241,
"step": 55
},
{
"epoch": 0.10752,
"grad_norm": 0.7025837302207947,
"learning_rate": 3.58974358974359e-06,
"loss": 0.3368,
"step": 56
},
{
"epoch": 0.10944,
"grad_norm": 0.7108639478683472,
"learning_rate": 3.653846153846154e-06,
"loss": 0.4519,
"step": 57
},
{
"epoch": 0.11136,
"grad_norm": 0.7567515969276428,
"learning_rate": 3.7179487179487184e-06,
"loss": 0.3824,
"step": 58
},
{
"epoch": 0.11328,
"grad_norm": 0.6944862604141235,
"learning_rate": 3.782051282051282e-06,
"loss": 0.3757,
"step": 59
},
{
"epoch": 0.1152,
"grad_norm": 0.6916061043739319,
"learning_rate": 3.846153846153847e-06,
"loss": 0.3943,
"step": 60
},
{
"epoch": 0.11712,
"grad_norm": 0.6254149675369263,
"learning_rate": 3.910256410256411e-06,
"loss": 0.3524,
"step": 61
},
{
"epoch": 0.11904,
"grad_norm": 0.6490861177444458,
"learning_rate": 3.974358974358974e-06,
"loss": 0.4087,
"step": 62
},
{
"epoch": 0.12096,
"grad_norm": 0.7554753422737122,
"learning_rate": 4.0384615384615385e-06,
"loss": 0.4162,
"step": 63
},
{
"epoch": 0.12288,
"grad_norm": 0.6378677487373352,
"learning_rate": 4.102564102564103e-06,
"loss": 0.3297,
"step": 64
},
{
"epoch": 0.1248,
"grad_norm": 0.6192220449447632,
"learning_rate": 4.166666666666667e-06,
"loss": 0.3619,
"step": 65
},
{
"epoch": 0.12672,
"grad_norm": 0.735885500907898,
"learning_rate": 4.230769230769231e-06,
"loss": 0.3987,
"step": 66
},
{
"epoch": 0.12864,
"grad_norm": 0.6629100441932678,
"learning_rate": 4.294871794871795e-06,
"loss": 0.3747,
"step": 67
},
{
"epoch": 0.13056,
"grad_norm": 0.6441377401351929,
"learning_rate": 4.358974358974359e-06,
"loss": 0.3468,
"step": 68
},
{
"epoch": 0.13248,
"grad_norm": 0.6883382201194763,
"learning_rate": 4.423076923076924e-06,
"loss": 0.3368,
"step": 69
},
{
"epoch": 0.1344,
"grad_norm": 0.6299349069595337,
"learning_rate": 4.487179487179488e-06,
"loss": 0.3768,
"step": 70
},
{
"epoch": 0.13632,
"grad_norm": 0.7488002181053162,
"learning_rate": 4.551282051282052e-06,
"loss": 0.3929,
"step": 71
},
{
"epoch": 0.13824,
"grad_norm": 0.6446295976638794,
"learning_rate": 4.615384615384616e-06,
"loss": 0.3482,
"step": 72
},
{
"epoch": 0.14016,
"grad_norm": 0.6761082410812378,
"learning_rate": 4.6794871794871795e-06,
"loss": 0.4185,
"step": 73
},
{
"epoch": 0.14208,
"grad_norm": 0.6064295172691345,
"learning_rate": 4.743589743589744e-06,
"loss": 0.3357,
"step": 74
},
{
"epoch": 0.144,
"grad_norm": 0.7098361253738403,
"learning_rate": 4.807692307692308e-06,
"loss": 0.3894,
"step": 75
},
{
"epoch": 0.14592,
"grad_norm": 0.7388235330581665,
"learning_rate": 4.871794871794872e-06,
"loss": 0.3931,
"step": 76
},
{
"epoch": 0.14784,
"grad_norm": 0.6687644720077515,
"learning_rate": 4.935897435897436e-06,
"loss": 0.3361,
"step": 77
},
{
"epoch": 0.14976,
"grad_norm": 0.6570240259170532,
"learning_rate": 5e-06,
"loss": 0.3899,
"step": 78
},
{
"epoch": 0.15168,
"grad_norm": 0.6812373399734497,
"learning_rate": 5.064102564102565e-06,
"loss": 0.4303,
"step": 79
},
{
"epoch": 0.1536,
"grad_norm": 0.6605876088142395,
"learning_rate": 5.128205128205128e-06,
"loss": 0.4059,
"step": 80
},
{
"epoch": 0.15552,
"grad_norm": 0.7260798215866089,
"learning_rate": 5.192307692307693e-06,
"loss": 0.4142,
"step": 81
},
{
"epoch": 0.15744,
"grad_norm": 0.622312068939209,
"learning_rate": 5.256410256410257e-06,
"loss": 0.3255,
"step": 82
},
{
"epoch": 0.15936,
"grad_norm": 0.6395354866981506,
"learning_rate": 5.320512820512821e-06,
"loss": 0.364,
"step": 83
},
{
"epoch": 0.16128,
"grad_norm": 0.6753534078598022,
"learning_rate": 5.384615384615385e-06,
"loss": 0.3449,
"step": 84
},
{
"epoch": 0.1632,
"grad_norm": 0.6522305011749268,
"learning_rate": 5.448717948717949e-06,
"loss": 0.4236,
"step": 85
},
{
"epoch": 0.16512,
"grad_norm": 0.6371592879295349,
"learning_rate": 5.512820512820514e-06,
"loss": 0.3897,
"step": 86
},
{
"epoch": 0.16704,
"grad_norm": 0.7068943381309509,
"learning_rate": 5.576923076923077e-06,
"loss": 0.4138,
"step": 87
},
{
"epoch": 0.16896,
"grad_norm": 0.5763352513313293,
"learning_rate": 5.641025641025641e-06,
"loss": 0.3146,
"step": 88
},
{
"epoch": 0.17088,
"grad_norm": 0.6121231913566589,
"learning_rate": 5.705128205128206e-06,
"loss": 0.3726,
"step": 89
},
{
"epoch": 0.1728,
"grad_norm": 0.6330202221870422,
"learning_rate": 5.769230769230769e-06,
"loss": 0.3916,
"step": 90
},
{
"epoch": 0.17472,
"grad_norm": 0.644561767578125,
"learning_rate": 5.833333333333334e-06,
"loss": 0.335,
"step": 91
},
{
"epoch": 0.17664,
"grad_norm": 0.646582305431366,
"learning_rate": 5.897435897435898e-06,
"loss": 0.3569,
"step": 92
},
{
"epoch": 0.17856,
"grad_norm": 0.6808898448944092,
"learning_rate": 5.961538461538462e-06,
"loss": 0.4036,
"step": 93
},
{
"epoch": 0.18048,
"grad_norm": 0.7068670988082886,
"learning_rate": 6.025641025641026e-06,
"loss": 0.4183,
"step": 94
},
{
"epoch": 0.1824,
"grad_norm": 0.6909053325653076,
"learning_rate": 6.08974358974359e-06,
"loss": 0.3802,
"step": 95
},
{
"epoch": 0.18432,
"grad_norm": 0.7508571147918701,
"learning_rate": 6.153846153846155e-06,
"loss": 0.4411,
"step": 96
},
{
"epoch": 0.18624,
"grad_norm": 0.6321552991867065,
"learning_rate": 6.217948717948718e-06,
"loss": 0.406,
"step": 97
},
{
"epoch": 0.18816,
"grad_norm": 0.659465491771698,
"learning_rate": 6.282051282051282e-06,
"loss": 0.3875,
"step": 98
},
{
"epoch": 0.19008,
"grad_norm": 0.637205183506012,
"learning_rate": 6.3461538461538466e-06,
"loss": 0.3807,
"step": 99
},
{
"epoch": 0.192,
"grad_norm": 0.719562292098999,
"learning_rate": 6.410256410256412e-06,
"loss": 0.3506,
"step": 100
},
{
"epoch": 0.19392,
"grad_norm": 0.5813566446304321,
"learning_rate": 6.474358974358975e-06,
"loss": 0.3502,
"step": 101
},
{
"epoch": 0.19584,
"grad_norm": 0.6817489862442017,
"learning_rate": 6.538461538461539e-06,
"loss": 0.3744,
"step": 102
},
{
"epoch": 0.19776,
"grad_norm": 0.6444496512413025,
"learning_rate": 6.602564102564103e-06,
"loss": 0.341,
"step": 103
},
{
"epoch": 0.19968,
"grad_norm": 0.6663426756858826,
"learning_rate": 6.666666666666667e-06,
"loss": 0.3789,
"step": 104
},
{
"epoch": 0.2016,
"grad_norm": 0.579590380191803,
"learning_rate": 6.730769230769232e-06,
"loss": 0.3247,
"step": 105
},
{
"epoch": 0.20352,
"grad_norm": 0.6688209176063538,
"learning_rate": 6.794871794871796e-06,
"loss": 0.3625,
"step": 106
},
{
"epoch": 0.20544,
"grad_norm": 0.6307254433631897,
"learning_rate": 6.858974358974359e-06,
"loss": 0.3599,
"step": 107
},
{
"epoch": 0.20736,
"grad_norm": 0.7129335999488831,
"learning_rate": 6.923076923076923e-06,
"loss": 0.3682,
"step": 108
},
{
"epoch": 0.20928,
"grad_norm": 0.6203235387802124,
"learning_rate": 6.9871794871794876e-06,
"loss": 0.3091,
"step": 109
},
{
"epoch": 0.2112,
"grad_norm": 0.6879836320877075,
"learning_rate": 7.051282051282053e-06,
"loss": 0.412,
"step": 110
},
{
"epoch": 0.21312,
"grad_norm": 0.6465985774993896,
"learning_rate": 7.115384615384616e-06,
"loss": 0.3901,
"step": 111
},
{
"epoch": 0.21504,
"grad_norm": 0.7844033241271973,
"learning_rate": 7.17948717948718e-06,
"loss": 0.4702,
"step": 112
},
{
"epoch": 0.21696,
"grad_norm": 0.7305936813354492,
"learning_rate": 7.243589743589744e-06,
"loss": 0.3689,
"step": 113
},
{
"epoch": 0.21888,
"grad_norm": 0.6698340177536011,
"learning_rate": 7.307692307692308e-06,
"loss": 0.4069,
"step": 114
},
{
"epoch": 0.2208,
"grad_norm": 0.6539076566696167,
"learning_rate": 7.371794871794873e-06,
"loss": 0.3722,
"step": 115
},
{
"epoch": 0.22272,
"grad_norm": 0.711211085319519,
"learning_rate": 7.435897435897437e-06,
"loss": 0.377,
"step": 116
},
{
"epoch": 0.22464,
"grad_norm": 0.8287427425384521,
"learning_rate": 7.500000000000001e-06,
"loss": 0.4612,
"step": 117
},
{
"epoch": 0.22656,
"grad_norm": 0.6670370697975159,
"learning_rate": 7.564102564102564e-06,
"loss": 0.3779,
"step": 118
},
{
"epoch": 0.22848,
"grad_norm": 0.7517879009246826,
"learning_rate": 7.6282051282051286e-06,
"loss": 0.4233,
"step": 119
},
{
"epoch": 0.2304,
"grad_norm": 0.6955645680427551,
"learning_rate": 7.692307692307694e-06,
"loss": 0.4319,
"step": 120
},
{
"epoch": 0.23232,
"grad_norm": 0.7018867135047913,
"learning_rate": 7.756410256410258e-06,
"loss": 0.431,
"step": 121
},
{
"epoch": 0.23424,
"grad_norm": 0.7604333162307739,
"learning_rate": 7.820512820512822e-06,
"loss": 0.363,
"step": 122
},
{
"epoch": 0.23616,
"grad_norm": 0.68422532081604,
"learning_rate": 7.884615384615384e-06,
"loss": 0.348,
"step": 123
},
{
"epoch": 0.23808,
"grad_norm": 0.6258844137191772,
"learning_rate": 7.948717948717949e-06,
"loss": 0.3133,
"step": 124
},
{
"epoch": 0.24,
"grad_norm": 0.704030454158783,
"learning_rate": 8.012820512820515e-06,
"loss": 0.4137,
"step": 125
},
{
"epoch": 0.24192,
"grad_norm": 0.7738426923751831,
"learning_rate": 8.076923076923077e-06,
"loss": 0.3761,
"step": 126
},
{
"epoch": 0.24384,
"grad_norm": 0.700972855091095,
"learning_rate": 8.141025641025641e-06,
"loss": 0.4211,
"step": 127
},
{
"epoch": 0.24576,
"grad_norm": 0.6561170816421509,
"learning_rate": 8.205128205128205e-06,
"loss": 0.3754,
"step": 128
},
{
"epoch": 0.24768,
"grad_norm": 0.6498628258705139,
"learning_rate": 8.26923076923077e-06,
"loss": 0.3719,
"step": 129
},
{
"epoch": 0.2496,
"grad_norm": 0.6547404527664185,
"learning_rate": 8.333333333333334e-06,
"loss": 0.3556,
"step": 130
},
{
"epoch": 0.25152,
"grad_norm": 0.6710948944091797,
"learning_rate": 8.397435897435898e-06,
"loss": 0.3646,
"step": 131
},
{
"epoch": 0.25344,
"grad_norm": 0.6932823657989502,
"learning_rate": 8.461538461538462e-06,
"loss": 0.39,
"step": 132
},
{
"epoch": 0.25536,
"grad_norm": 0.6260711550712585,
"learning_rate": 8.525641025641026e-06,
"loss": 0.3579,
"step": 133
},
{
"epoch": 0.25728,
"grad_norm": 0.6835085153579712,
"learning_rate": 8.58974358974359e-06,
"loss": 0.3747,
"step": 134
},
{
"epoch": 0.2592,
"grad_norm": 0.7214824557304382,
"learning_rate": 8.653846153846155e-06,
"loss": 0.3838,
"step": 135
},
{
"epoch": 0.26112,
"grad_norm": 0.6528518795967102,
"learning_rate": 8.717948717948719e-06,
"loss": 0.4058,
"step": 136
},
{
"epoch": 0.26304,
"grad_norm": 0.7477070093154907,
"learning_rate": 8.782051282051283e-06,
"loss": 0.4227,
"step": 137
},
{
"epoch": 0.26496,
"grad_norm": 0.5661771297454834,
"learning_rate": 8.846153846153847e-06,
"loss": 0.3599,
"step": 138
},
{
"epoch": 0.26688,
"grad_norm": 0.6169576048851013,
"learning_rate": 8.910256410256411e-06,
"loss": 0.3574,
"step": 139
},
{
"epoch": 0.2688,
"grad_norm": 0.6223636865615845,
"learning_rate": 8.974358974358976e-06,
"loss": 0.4166,
"step": 140
},
{
"epoch": 0.27072,
"grad_norm": 0.6733807325363159,
"learning_rate": 9.03846153846154e-06,
"loss": 0.3657,
"step": 141
},
{
"epoch": 0.27264,
"grad_norm": 0.6087719202041626,
"learning_rate": 9.102564102564104e-06,
"loss": 0.3204,
"step": 142
},
{
"epoch": 0.27456,
"grad_norm": 0.694625198841095,
"learning_rate": 9.166666666666666e-06,
"loss": 0.351,
"step": 143
},
{
"epoch": 0.27648,
"grad_norm": 0.7046074867248535,
"learning_rate": 9.230769230769232e-06,
"loss": 0.4015,
"step": 144
},
{
"epoch": 0.2784,
"grad_norm": 0.53879314661026,
"learning_rate": 9.294871794871796e-06,
"loss": 0.3399,
"step": 145
},
{
"epoch": 0.28032,
"grad_norm": 0.6909272074699402,
"learning_rate": 9.358974358974359e-06,
"loss": 0.3791,
"step": 146
},
{
"epoch": 0.28224,
"grad_norm": 0.771980345249176,
"learning_rate": 9.423076923076923e-06,
"loss": 0.413,
"step": 147
},
{
"epoch": 0.28416,
"grad_norm": 0.6175732016563416,
"learning_rate": 9.487179487179487e-06,
"loss": 0.4188,
"step": 148
},
{
"epoch": 0.28608,
"grad_norm": 0.6606230139732361,
"learning_rate": 9.551282051282053e-06,
"loss": 0.383,
"step": 149
},
{
"epoch": 0.288,
"grad_norm": 0.6148602366447449,
"learning_rate": 9.615384615384616e-06,
"loss": 0.3859,
"step": 150
},
{
"epoch": 0.28992,
"grad_norm": 0.5951830148696899,
"learning_rate": 9.67948717948718e-06,
"loss": 0.3388,
"step": 151
},
{
"epoch": 0.29184,
"grad_norm": 0.6593315005302429,
"learning_rate": 9.743589743589744e-06,
"loss": 0.3527,
"step": 152
},
{
"epoch": 0.29376,
"grad_norm": 0.7437178492546082,
"learning_rate": 9.807692307692308e-06,
"loss": 0.4114,
"step": 153
},
{
"epoch": 0.29568,
"grad_norm": 0.6679487824440002,
"learning_rate": 9.871794871794872e-06,
"loss": 0.3403,
"step": 154
},
{
"epoch": 0.2976,
"grad_norm": 0.6399834752082825,
"learning_rate": 9.935897435897437e-06,
"loss": 0.4467,
"step": 155
},
{
"epoch": 0.29952,
"grad_norm": 0.7697778940200806,
"learning_rate": 1e-05,
"loss": 0.4213,
"step": 156
},
{
"epoch": 0.30144,
"grad_norm": 0.6772565841674805,
"learning_rate": 9.999987482852865e-06,
"loss": 0.4109,
"step": 157
},
{
"epoch": 0.30336,
"grad_norm": 0.6980816125869751,
"learning_rate": 9.99994993147413e-06,
"loss": 0.4001,
"step": 158
},
{
"epoch": 0.30528,
"grad_norm": 0.6596171259880066,
"learning_rate": 9.99988734605181e-06,
"loss": 0.3473,
"step": 159
},
{
"epoch": 0.3072,
"grad_norm": 0.6155123710632324,
"learning_rate": 9.999799726899261e-06,
"loss": 0.3496,
"step": 160
},
{
"epoch": 0.30912,
"grad_norm": 0.636191189289093,
"learning_rate": 9.999687074455182e-06,
"loss": 0.3747,
"step": 161
},
{
"epoch": 0.31104,
"grad_norm": 0.6016921401023865,
"learning_rate": 9.999549389283605e-06,
"loss": 0.339,
"step": 162
},
{
"epoch": 0.31296,
"grad_norm": 0.7649242281913757,
"learning_rate": 9.999386672073902e-06,
"loss": 0.3465,
"step": 163
},
{
"epoch": 0.31488,
"grad_norm": 0.6430949568748474,
"learning_rate": 9.999198923640774e-06,
"loss": 0.4259,
"step": 164
},
{
"epoch": 0.3168,
"grad_norm": 0.6943743228912354,
"learning_rate": 9.998986144924253e-06,
"loss": 0.3585,
"step": 165
},
{
"epoch": 0.31872,
"grad_norm": 0.6267467737197876,
"learning_rate": 9.998748336989687e-06,
"loss": 0.3535,
"step": 166
},
{
"epoch": 0.32064,
"grad_norm": 0.7696426510810852,
"learning_rate": 9.998485501027752e-06,
"loss": 0.3812,
"step": 167
},
{
"epoch": 0.32256,
"grad_norm": 0.5812033414840698,
"learning_rate": 9.998197638354428e-06,
"loss": 0.3179,
"step": 168
},
{
"epoch": 0.32448,
"grad_norm": 0.6402625441551208,
"learning_rate": 9.997884750411004e-06,
"loss": 0.3367,
"step": 169
},
{
"epoch": 0.3264,
"grad_norm": 0.7286834716796875,
"learning_rate": 9.997546838764066e-06,
"loss": 0.376,
"step": 170
},
{
"epoch": 0.32832,
"grad_norm": 0.6623519659042358,
"learning_rate": 9.997183905105487e-06,
"loss": 0.3745,
"step": 171
},
{
"epoch": 0.33024,
"grad_norm": 0.6471444964408875,
"learning_rate": 9.996795951252427e-06,
"loss": 0.3271,
"step": 172
},
{
"epoch": 0.33216,
"grad_norm": 0.7801302671432495,
"learning_rate": 9.996382979147316e-06,
"loss": 0.3865,
"step": 173
},
{
"epoch": 0.33408,
"grad_norm": 0.7685811519622803,
"learning_rate": 9.995944990857848e-06,
"loss": 0.4091,
"step": 174
},
{
"epoch": 0.336,
"grad_norm": 0.6229328513145447,
"learning_rate": 9.995481988576968e-06,
"loss": 0.4266,
"step": 175
},
{
"epoch": 0.33792,
"grad_norm": 0.715920627117157,
"learning_rate": 9.994993974622863e-06,
"loss": 0.318,
"step": 176
},
{
"epoch": 0.33984,
"grad_norm": 0.7303981781005859,
"learning_rate": 9.994480951438948e-06,
"loss": 0.3555,
"step": 177
},
{
"epoch": 0.34176,
"grad_norm": 0.5643477439880371,
"learning_rate": 9.993942921593858e-06,
"loss": 0.3104,
"step": 178
},
{
"epoch": 0.34368,
"grad_norm": 0.6063923835754395,
"learning_rate": 9.993379887781436e-06,
"loss": 0.3357,
"step": 179
},
{
"epoch": 0.3456,
"grad_norm": 0.6988375782966614,
"learning_rate": 9.992791852820709e-06,
"loss": 0.3652,
"step": 180
},
{
"epoch": 0.34752,
"grad_norm": 0.7563511729240417,
"learning_rate": 9.992178819655889e-06,
"loss": 0.3708,
"step": 181
},
{
"epoch": 0.34944,
"grad_norm": 0.6427266597747803,
"learning_rate": 9.991540791356342e-06,
"loss": 0.4065,
"step": 182
},
{
"epoch": 0.35136,
"grad_norm": 0.6238411068916321,
"learning_rate": 9.990877771116588e-06,
"loss": 0.3559,
"step": 183
},
{
"epoch": 0.35328,
"grad_norm": 0.6406944394111633,
"learning_rate": 9.990189762256275e-06,
"loss": 0.369,
"step": 184
},
{
"epoch": 0.3552,
"grad_norm": 0.6363815069198608,
"learning_rate": 9.98947676822017e-06,
"loss": 0.4005,
"step": 185
},
{
"epoch": 0.35712,
"grad_norm": 0.6001457571983337,
"learning_rate": 9.988738792578126e-06,
"loss": 0.3567,
"step": 186
},
{
"epoch": 0.35904,
"grad_norm": 0.6912431716918945,
"learning_rate": 9.987975839025091e-06,
"loss": 0.4575,
"step": 187
},
{
"epoch": 0.36096,
"grad_norm": 0.6562398672103882,
"learning_rate": 9.987187911381059e-06,
"loss": 0.4193,
"step": 188
},
{
"epoch": 0.36288,
"grad_norm": 0.6208478808403015,
"learning_rate": 9.986375013591077e-06,
"loss": 0.4666,
"step": 189
},
{
"epoch": 0.3648,
"grad_norm": 0.6140576004981995,
"learning_rate": 9.985537149725207e-06,
"loss": 0.4211,
"step": 190
},
{
"epoch": 0.36672,
"grad_norm": 0.6628037691116333,
"learning_rate": 9.984674323978517e-06,
"loss": 0.3883,
"step": 191
},
{
"epoch": 0.36864,
"grad_norm": 0.6753054857254028,
"learning_rate": 9.983786540671052e-06,
"loss": 0.3363,
"step": 192
},
{
"epoch": 0.37056,
"grad_norm": 0.6120296716690063,
"learning_rate": 9.982873804247817e-06,
"loss": 0.3693,
"step": 193
},
{
"epoch": 0.37248,
"grad_norm": 0.6496464610099792,
"learning_rate": 9.981936119278758e-06,
"loss": 0.4039,
"step": 194
},
{
"epoch": 0.3744,
"grad_norm": 0.6185566186904907,
"learning_rate": 9.980973490458728e-06,
"loss": 0.3693,
"step": 195
},
{
"epoch": 0.37632,
"grad_norm": 0.6195867657661438,
"learning_rate": 9.979985922607476e-06,
"loss": 0.3451,
"step": 196
},
{
"epoch": 0.37824,
"grad_norm": 0.6248885989189148,
"learning_rate": 9.978973420669613e-06,
"loss": 0.4278,
"step": 197
},
{
"epoch": 0.38016,
"grad_norm": 0.5913709402084351,
"learning_rate": 9.977935989714594e-06,
"loss": 0.3848,
"step": 198
},
{
"epoch": 0.38208,
"grad_norm": 0.6100884079933167,
"learning_rate": 9.97687363493669e-06,
"loss": 0.394,
"step": 199
},
{
"epoch": 0.384,
"grad_norm": 0.6409049034118652,
"learning_rate": 9.975786361654959e-06,
"loss": 0.3358,
"step": 200
},
{
"epoch": 0.38592,
"grad_norm": 0.6062589883804321,
"learning_rate": 9.974674175313228e-06,
"loss": 0.3622,
"step": 201
},
{
"epoch": 0.38784,
"grad_norm": 0.6187102794647217,
"learning_rate": 9.973537081480056e-06,
"loss": 0.3846,
"step": 202
},
{
"epoch": 0.38976,
"grad_norm": 0.607683002948761,
"learning_rate": 9.972375085848712e-06,
"loss": 0.3497,
"step": 203
},
{
"epoch": 0.39168,
"grad_norm": 0.6635757088661194,
"learning_rate": 9.971188194237141e-06,
"loss": 0.37,
"step": 204
},
{
"epoch": 0.3936,
"grad_norm": 0.6417316198348999,
"learning_rate": 9.969976412587943e-06,
"loss": 0.3652,
"step": 205
},
{
"epoch": 0.39552,
"grad_norm": 0.5575540661811829,
"learning_rate": 9.96873974696834e-06,
"loss": 0.366,
"step": 206
},
{
"epoch": 0.39744,
"grad_norm": 0.635612964630127,
"learning_rate": 9.967478203570143e-06,
"loss": 0.3554,
"step": 207
},
{
"epoch": 0.39936,
"grad_norm": 0.6379068493843079,
"learning_rate": 9.966191788709716e-06,
"loss": 0.4089,
"step": 208
},
{
"epoch": 0.40128,
"grad_norm": 0.6174411177635193,
"learning_rate": 9.96488050882796e-06,
"loss": 0.4,
"step": 209
},
{
"epoch": 0.4032,
"grad_norm": 0.6518369317054749,
"learning_rate": 9.96354437049027e-06,
"loss": 0.411,
"step": 210
},
{
"epoch": 0.40512,
"grad_norm": 0.5786463022232056,
"learning_rate": 9.9621833803865e-06,
"loss": 0.3982,
"step": 211
},
{
"epoch": 0.40704,
"grad_norm": 0.6515635251998901,
"learning_rate": 9.960797545330936e-06,
"loss": 0.3934,
"step": 212
},
{
"epoch": 0.40896,
"grad_norm": 0.584307849407196,
"learning_rate": 9.95938687226226e-06,
"loss": 0.3622,
"step": 213
},
{
"epoch": 0.41088,
"grad_norm": 0.6867027282714844,
"learning_rate": 9.95795136824351e-06,
"loss": 0.4684,
"step": 214
},
{
"epoch": 0.4128,
"grad_norm": 0.6356746554374695,
"learning_rate": 9.956491040462051e-06,
"loss": 0.4206,
"step": 215
},
{
"epoch": 0.41472,
"grad_norm": 0.6305949091911316,
"learning_rate": 9.955005896229543e-06,
"loss": 0.3357,
"step": 216
},
{
"epoch": 0.41664,
"grad_norm": 0.5713821649551392,
"learning_rate": 9.95349594298189e-06,
"loss": 0.4133,
"step": 217
},
{
"epoch": 0.41856,
"grad_norm": 0.5651805996894836,
"learning_rate": 9.951961188279216e-06,
"loss": 0.3465,
"step": 218
},
{
"epoch": 0.42048,
"grad_norm": 0.5878507494926453,
"learning_rate": 9.950401639805822e-06,
"loss": 0.3639,
"step": 219
},
{
"epoch": 0.4224,
"grad_norm": 0.6849126815795898,
"learning_rate": 9.948817305370145e-06,
"loss": 0.3477,
"step": 220
},
{
"epoch": 0.42432,
"grad_norm": 0.6204988360404968,
"learning_rate": 9.947208192904722e-06,
"loss": 0.3858,
"step": 221
},
{
"epoch": 0.42624,
"grad_norm": 0.5640665292739868,
"learning_rate": 9.94557431046616e-06,
"loss": 0.3791,
"step": 222
},
{
"epoch": 0.42816,
"grad_norm": 0.5927354097366333,
"learning_rate": 9.943915666235068e-06,
"loss": 0.3849,
"step": 223
},
{
"epoch": 0.43008,
"grad_norm": 0.5869733691215515,
"learning_rate": 9.942232268516051e-06,
"loss": 0.3699,
"step": 224
},
{
"epoch": 0.432,
"grad_norm": 0.6834899187088013,
"learning_rate": 9.940524125737641e-06,
"loss": 0.4146,
"step": 225
},
{
"epoch": 0.43392,
"grad_norm": 0.5563869476318359,
"learning_rate": 9.938791246452267e-06,
"loss": 0.339,
"step": 226
},
{
"epoch": 0.43584,
"grad_norm": 0.5262661576271057,
"learning_rate": 9.937033639336212e-06,
"loss": 0.3316,
"step": 227
},
{
"epoch": 0.43776,
"grad_norm": 0.5714774131774902,
"learning_rate": 9.935251313189564e-06,
"loss": 0.3735,
"step": 228
},
{
"epoch": 0.43968,
"grad_norm": 0.575494110584259,
"learning_rate": 9.933444276936185e-06,
"loss": 0.3875,
"step": 229
},
{
"epoch": 0.4416,
"grad_norm": 0.5748825073242188,
"learning_rate": 9.931612539623643e-06,
"loss": 0.3473,
"step": 230
},
{
"epoch": 0.44352,
"grad_norm": 0.5654993653297424,
"learning_rate": 9.929756110423195e-06,
"loss": 0.3526,
"step": 231
},
{
"epoch": 0.44544,
"grad_norm": 0.5969668030738831,
"learning_rate": 9.927874998629714e-06,
"loss": 0.3731,
"step": 232
},
{
"epoch": 0.44736,
"grad_norm": 0.5902429819107056,
"learning_rate": 9.925969213661664e-06,
"loss": 0.3484,
"step": 233
},
{
"epoch": 0.44928,
"grad_norm": 0.62022465467453,
"learning_rate": 9.924038765061042e-06,
"loss": 0.4126,
"step": 234
},
{
"epoch": 0.4512,
"grad_norm": 0.6245263814926147,
"learning_rate": 9.92208366249333e-06,
"loss": 0.3964,
"step": 235
},
{
"epoch": 0.45312,
"grad_norm": 0.5946273803710938,
"learning_rate": 9.920103915747452e-06,
"loss": 0.3631,
"step": 236
},
{
"epoch": 0.45504,
"grad_norm": 0.5720643997192383,
"learning_rate": 9.91809953473572e-06,
"loss": 0.3708,
"step": 237
},
{
"epoch": 0.45696,
"grad_norm": 0.6433354616165161,
"learning_rate": 9.916070529493785e-06,
"loss": 0.4049,
"step": 238
},
{
"epoch": 0.45888,
"grad_norm": 0.5687940120697021,
"learning_rate": 9.914016910180593e-06,
"loss": 0.3203,
"step": 239
},
{
"epoch": 0.4608,
"grad_norm": 0.5873810052871704,
"learning_rate": 9.911938687078324e-06,
"loss": 0.3894,
"step": 240
},
{
"epoch": 0.46272,
"grad_norm": 0.5599617958068848,
"learning_rate": 9.90983587059235e-06,
"loss": 0.3379,
"step": 241
},
{
"epoch": 0.46464,
"grad_norm": 0.534267008304596,
"learning_rate": 9.907708471251173e-06,
"loss": 0.3406,
"step": 242
},
{
"epoch": 0.46656,
"grad_norm": 0.5943034291267395,
"learning_rate": 9.905556499706384e-06,
"loss": 0.3379,
"step": 243
},
{
"epoch": 0.46848,
"grad_norm": 0.5704087018966675,
"learning_rate": 9.9033799667326e-06,
"loss": 0.3607,
"step": 244
},
{
"epoch": 0.4704,
"grad_norm": 0.5806458592414856,
"learning_rate": 9.901178883227413e-06,
"loss": 0.365,
"step": 245
},
{
"epoch": 0.47232,
"grad_norm": 0.5489027500152588,
"learning_rate": 9.89895326021134e-06,
"loss": 0.4215,
"step": 246
},
{
"epoch": 0.47424,
"grad_norm": 0.6265918016433716,
"learning_rate": 9.896703108827758e-06,
"loss": 0.376,
"step": 247
},
{
"epoch": 0.47616,
"grad_norm": 0.5624193549156189,
"learning_rate": 9.89442844034286e-06,
"loss": 0.4133,
"step": 248
},
{
"epoch": 0.47808,
"grad_norm": 0.5382794141769409,
"learning_rate": 9.89212926614559e-06,
"loss": 0.3267,
"step": 249
},
{
"epoch": 0.48,
"grad_norm": 0.5647448301315308,
"learning_rate": 9.889805597747588e-06,
"loss": 0.3719,
"step": 250
},
{
"epoch": 0.48192,
"grad_norm": 0.5812395811080933,
"learning_rate": 9.887457446783133e-06,
"loss": 0.4172,
"step": 251
},
{
"epoch": 0.48384,
"grad_norm": 0.5985914468765259,
"learning_rate": 9.885084825009085e-06,
"loss": 0.3827,
"step": 252
},
{
"epoch": 0.48576,
"grad_norm": 0.6193224787712097,
"learning_rate": 9.88268774430483e-06,
"loss": 0.4342,
"step": 253
},
{
"epoch": 0.48768,
"grad_norm": 0.5904422402381897,
"learning_rate": 9.88026621667221e-06,
"loss": 0.402,
"step": 254
},
{
"epoch": 0.4896,
"grad_norm": 0.4797731637954712,
"learning_rate": 9.87782025423547e-06,
"loss": 0.3065,
"step": 255
},
{
"epoch": 0.49152,
"grad_norm": 0.572597324848175,
"learning_rate": 9.875349869241202e-06,
"loss": 0.4436,
"step": 256
},
{
"epoch": 0.49344,
"grad_norm": 0.556987464427948,
"learning_rate": 9.872855074058274e-06,
"loss": 0.371,
"step": 257
},
{
"epoch": 0.49536,
"grad_norm": 0.6326332092285156,
"learning_rate": 9.870335881177774e-06,
"loss": 0.4551,
"step": 258
},
{
"epoch": 0.49728,
"grad_norm": 0.5439813733100891,
"learning_rate": 9.867792303212944e-06,
"loss": 0.3429,
"step": 259
},
{
"epoch": 0.4992,
"grad_norm": 0.5344077944755554,
"learning_rate": 9.86522435289912e-06,
"loss": 0.3454,
"step": 260
},
{
"epoch": 0.50112,
"grad_norm": 0.5606799125671387,
"learning_rate": 9.862632043093667e-06,
"loss": 0.3616,
"step": 261
},
{
"epoch": 0.50304,
"grad_norm": 0.5990909934043884,
"learning_rate": 9.860015386775915e-06,
"loss": 0.3964,
"step": 262
},
{
"epoch": 0.50496,
"grad_norm": 0.5721217393875122,
"learning_rate": 9.857374397047093e-06,
"loss": 0.3292,
"step": 263
},
{
"epoch": 0.50688,
"grad_norm": 0.5306844115257263,
"learning_rate": 9.854709087130261e-06,
"loss": 0.3675,
"step": 264
},
{
"epoch": 0.5088,
"grad_norm": 0.5205779075622559,
"learning_rate": 9.852019470370254e-06,
"loss": 0.3703,
"step": 265
},
{
"epoch": 0.51072,
"grad_norm": 0.5410917401313782,
"learning_rate": 9.849305560233598e-06,
"loss": 0.3435,
"step": 266
},
{
"epoch": 0.51264,
"grad_norm": 0.5912427306175232,
"learning_rate": 9.846567370308462e-06,
"loss": 0.3641,
"step": 267
},
{
"epoch": 0.51456,
"grad_norm": 0.60971999168396,
"learning_rate": 9.843804914304578e-06,
"loss": 0.3929,
"step": 268
},
{
"epoch": 0.51648,
"grad_norm": 0.5239255428314209,
"learning_rate": 9.841018206053167e-06,
"loss": 0.3576,
"step": 269
},
{
"epoch": 0.5184,
"grad_norm": 0.5572692155838013,
"learning_rate": 9.838207259506891e-06,
"loss": 0.3982,
"step": 270
},
{
"epoch": 0.52032,
"grad_norm": 0.5695377588272095,
"learning_rate": 9.835372088739758e-06,
"loss": 0.3756,
"step": 271
},
{
"epoch": 0.52224,
"grad_norm": 2.5319535732269287,
"learning_rate": 9.83251270794707e-06,
"loss": 0.5625,
"step": 272
},
{
"epoch": 0.52416,
"grad_norm": 0.6499341726303101,
"learning_rate": 9.829629131445342e-06,
"loss": 0.3593,
"step": 273
},
{
"epoch": 0.52608,
"grad_norm": 0.5447263717651367,
"learning_rate": 9.826721373672235e-06,
"loss": 0.3721,
"step": 274
},
{
"epoch": 0.528,
"grad_norm": 0.5670105218887329,
"learning_rate": 9.82378944918648e-06,
"loss": 0.3642,
"step": 275
},
{
"epoch": 0.52992,
"grad_norm": 0.565808117389679,
"learning_rate": 9.820833372667813e-06,
"loss": 0.4176,
"step": 276
},
{
"epoch": 0.53184,
"grad_norm": 0.5765930414199829,
"learning_rate": 9.817853158916889e-06,
"loss": 0.391,
"step": 277
},
{
"epoch": 0.53376,
"grad_norm": 1.2406972646713257,
"learning_rate": 9.814848822855216e-06,
"loss": 0.4144,
"step": 278
},
{
"epoch": 0.53568,
"grad_norm": 0.6053399443626404,
"learning_rate": 9.811820379525085e-06,
"loss": 0.4063,
"step": 279
},
{
"epoch": 0.5376,
"grad_norm": 0.6168642044067383,
"learning_rate": 9.80876784408948e-06,
"loss": 0.4,
"step": 280
},
{
"epoch": 0.53952,
"grad_norm": 0.5527163147926331,
"learning_rate": 9.805691231832018e-06,
"loss": 0.3778,
"step": 281
},
{
"epoch": 0.54144,
"grad_norm": 0.5546169877052307,
"learning_rate": 9.802590558156863e-06,
"loss": 0.4043,
"step": 282
},
{
"epoch": 0.54336,
"grad_norm": 0.5706309080123901,
"learning_rate": 9.799465838588646e-06,
"loss": 0.3627,
"step": 283
},
{
"epoch": 0.54528,
"grad_norm": 0.588741660118103,
"learning_rate": 9.796317088772402e-06,
"loss": 0.4261,
"step": 284
},
{
"epoch": 0.5472,
"grad_norm": 0.5763176679611206,
"learning_rate": 9.793144324473473e-06,
"loss": 0.3792,
"step": 285
},
{
"epoch": 0.54912,
"grad_norm": 0.5680968165397644,
"learning_rate": 9.789947561577445e-06,
"loss": 0.3641,
"step": 286
},
{
"epoch": 0.55104,
"grad_norm": 0.5095458626747131,
"learning_rate": 9.786726816090058e-06,
"loss": 0.3832,
"step": 287
},
{
"epoch": 0.55296,
"grad_norm": 0.6120963096618652,
"learning_rate": 9.783482104137127e-06,
"loss": 0.3997,
"step": 288
},
{
"epoch": 0.55488,
"grad_norm": 0.5892914533615112,
"learning_rate": 9.78021344196447e-06,
"loss": 0.3988,
"step": 289
},
{
"epoch": 0.5568,
"grad_norm": 0.5978222489356995,
"learning_rate": 9.776920845937816e-06,
"loss": 0.3914,
"step": 290
},
{
"epoch": 0.55872,
"grad_norm": 0.5431849956512451,
"learning_rate": 9.77360433254273e-06,
"loss": 0.3748,
"step": 291
},
{
"epoch": 0.56064,
"grad_norm": 0.5919901728630066,
"learning_rate": 9.770263918384523e-06,
"loss": 0.4183,
"step": 292
},
{
"epoch": 0.56256,
"grad_norm": 0.5523582100868225,
"learning_rate": 9.766899620188181e-06,
"loss": 0.3979,
"step": 293
},
{
"epoch": 0.56448,
"grad_norm": 0.8299493193626404,
"learning_rate": 9.763511454798268e-06,
"loss": 0.5165,
"step": 294
},
{
"epoch": 0.5664,
"grad_norm": 0.5611211061477661,
"learning_rate": 9.760099439178852e-06,
"loss": 0.4138,
"step": 295
},
{
"epoch": 0.56832,
"grad_norm": 0.5100602507591248,
"learning_rate": 9.75666359041341e-06,
"loss": 0.3241,
"step": 296
},
{
"epoch": 0.57024,
"grad_norm": 0.5935276746749878,
"learning_rate": 9.753203925704756e-06,
"loss": 0.4274,
"step": 297
},
{
"epoch": 0.57216,
"grad_norm": 0.5931862592697144,
"learning_rate": 9.749720462374939e-06,
"loss": 0.3813,
"step": 298
},
{
"epoch": 0.57408,
"grad_norm": 0.5815238952636719,
"learning_rate": 9.74621321786517e-06,
"loss": 0.3586,
"step": 299
},
{
"epoch": 0.576,
"grad_norm": 0.5239144563674927,
"learning_rate": 9.742682209735727e-06,
"loss": 0.373,
"step": 300
},
{
"epoch": 0.57792,
"grad_norm": 0.49696826934814453,
"learning_rate": 9.739127455665872e-06,
"loss": 0.3692,
"step": 301
},
{
"epoch": 0.57984,
"grad_norm": 0.5729038715362549,
"learning_rate": 9.735548973453753e-06,
"loss": 0.4047,
"step": 302
},
{
"epoch": 0.58176,
"grad_norm": 0.5142698884010315,
"learning_rate": 9.731946781016328e-06,
"loss": 0.4281,
"step": 303
},
{
"epoch": 0.58368,
"grad_norm": 0.5355499386787415,
"learning_rate": 9.728320896389263e-06,
"loss": 0.3567,
"step": 304
},
{
"epoch": 0.5856,
"grad_norm": 0.5167937278747559,
"learning_rate": 9.724671337726855e-06,
"loss": 0.3312,
"step": 305
},
{
"epoch": 0.58752,
"grad_norm": 0.5283429026603699,
"learning_rate": 9.720998123301924e-06,
"loss": 0.3486,
"step": 306
},
{
"epoch": 0.58944,
"grad_norm": 0.5296729207038879,
"learning_rate": 9.717301271505739e-06,
"loss": 0.4197,
"step": 307
},
{
"epoch": 0.59136,
"grad_norm": 0.48838165402412415,
"learning_rate": 9.713580800847917e-06,
"loss": 0.2932,
"step": 308
},
{
"epoch": 0.59328,
"grad_norm": 0.5375988483428955,
"learning_rate": 9.709836729956326e-06,
"loss": 0.4537,
"step": 309
},
{
"epoch": 0.5952,
"grad_norm": 0.5691147446632385,
"learning_rate": 9.706069077577e-06,
"loss": 0.3784,
"step": 310
},
{
"epoch": 0.59712,
"grad_norm": 0.5432304739952087,
"learning_rate": 9.702277862574048e-06,
"loss": 0.3903,
"step": 311
},
{
"epoch": 0.59904,
"grad_norm": 0.5267314910888672,
"learning_rate": 9.698463103929542e-06,
"loss": 0.3658,
"step": 312
},
{
"epoch": 0.60096,
"grad_norm": 0.8837982416152954,
"learning_rate": 9.694624820743446e-06,
"loss": 0.4735,
"step": 313
},
{
"epoch": 0.60288,
"grad_norm": 0.5846924781799316,
"learning_rate": 9.690763032233498e-06,
"loss": 0.3813,
"step": 314
},
{
"epoch": 0.6048,
"grad_norm": 0.5922628045082092,
"learning_rate": 9.686877757735126e-06,
"loss": 0.3183,
"step": 315
},
{
"epoch": 0.60672,
"grad_norm": 0.5862982869148254,
"learning_rate": 9.682969016701357e-06,
"loss": 0.3828,
"step": 316
},
{
"epoch": 0.60864,
"grad_norm": 0.5055819153785706,
"learning_rate": 9.679036828702704e-06,
"loss": 0.3185,
"step": 317
},
{
"epoch": 0.61056,
"grad_norm": 0.5191113948822021,
"learning_rate": 9.675081213427076e-06,
"loss": 0.3781,
"step": 318
},
{
"epoch": 0.61248,
"grad_norm": 0.5721320509910583,
"learning_rate": 9.671102190679678e-06,
"loss": 0.3697,
"step": 319
},
{
"epoch": 0.6144,
"grad_norm": 0.6250271797180176,
"learning_rate": 9.66709978038292e-06,
"loss": 0.4231,
"step": 320
},
{
"epoch": 0.61632,
"grad_norm": 0.5418332815170288,
"learning_rate": 9.663074002576303e-06,
"loss": 0.3838,
"step": 321
},
{
"epoch": 0.61824,
"grad_norm": 0.693120002746582,
"learning_rate": 9.659024877416328e-06,
"loss": 0.4104,
"step": 322
},
{
"epoch": 0.62016,
"grad_norm": 0.5176163911819458,
"learning_rate": 9.654952425176392e-06,
"loss": 0.3889,
"step": 323
},
{
"epoch": 0.62208,
"grad_norm": 0.5591539144515991,
"learning_rate": 9.650856666246693e-06,
"loss": 0.3684,
"step": 324
},
{
"epoch": 0.624,
"grad_norm": 0.5936065912246704,
"learning_rate": 9.646737621134112e-06,
"loss": 0.4147,
"step": 325
},
{
"epoch": 0.62592,
"grad_norm": 0.56556636095047,
"learning_rate": 9.642595310462133e-06,
"loss": 0.4069,
"step": 326
},
{
"epoch": 0.62784,
"grad_norm": 0.5234876871109009,
"learning_rate": 9.638429754970715e-06,
"loss": 0.3899,
"step": 327
},
{
"epoch": 0.62976,
"grad_norm": 0.6373021006584167,
"learning_rate": 9.63424097551621e-06,
"loss": 0.3975,
"step": 328
},
{
"epoch": 0.63168,
"grad_norm": 0.6174745559692383,
"learning_rate": 9.630028993071244e-06,
"loss": 0.343,
"step": 329
},
{
"epoch": 0.6336,
"grad_norm": 0.599328875541687,
"learning_rate": 9.62579382872462e-06,
"loss": 0.4428,
"step": 330
},
{
"epoch": 0.63552,
"grad_norm": 0.6742843389511108,
"learning_rate": 9.621535503681205e-06,
"loss": 0.4131,
"step": 331
},
{
"epoch": 0.63744,
"grad_norm": 0.5281351208686829,
"learning_rate": 9.617254039261835e-06,
"loss": 0.4621,
"step": 332
},
{
"epoch": 0.63936,
"grad_norm": 0.5671970844268799,
"learning_rate": 9.612949456903195e-06,
"loss": 0.365,
"step": 333
},
{
"epoch": 0.64128,
"grad_norm": 0.5644413232803345,
"learning_rate": 9.608621778157722e-06,
"loss": 0.3767,
"step": 334
},
{
"epoch": 0.6432,
"grad_norm": 0.555192232131958,
"learning_rate": 9.604271024693495e-06,
"loss": 0.3567,
"step": 335
},
{
"epoch": 0.64512,
"grad_norm": 0.5586106181144714,
"learning_rate": 9.599897218294122e-06,
"loss": 0.3383,
"step": 336
},
{
"epoch": 0.64704,
"grad_norm": 0.5800786018371582,
"learning_rate": 9.595500380858632e-06,
"loss": 0.3907,
"step": 337
},
{
"epoch": 0.64896,
"grad_norm": 0.5682685971260071,
"learning_rate": 9.591080534401371e-06,
"loss": 0.3825,
"step": 338
},
{
"epoch": 0.65088,
"grad_norm": 0.49595436453819275,
"learning_rate": 9.586637701051886e-06,
"loss": 0.331,
"step": 339
},
{
"epoch": 0.6528,
"grad_norm": 0.5792511105537415,
"learning_rate": 9.582171903054815e-06,
"loss": 0.4045,
"step": 340
},
{
"epoch": 0.65472,
"grad_norm": 0.5528738498687744,
"learning_rate": 9.577683162769781e-06,
"loss": 0.3169,
"step": 341
},
{
"epoch": 0.65664,
"grad_norm": 0.5429103374481201,
"learning_rate": 9.573171502671273e-06,
"loss": 0.3607,
"step": 342
},
{
"epoch": 0.65856,
"grad_norm": 0.5574133992195129,
"learning_rate": 9.568636945348534e-06,
"loss": 0.3586,
"step": 343
},
{
"epoch": 0.66048,
"grad_norm": 0.5743346810340881,
"learning_rate": 9.564079513505455e-06,
"loss": 0.3922,
"step": 344
},
{
"epoch": 0.6624,
"grad_norm": 0.5865179300308228,
"learning_rate": 9.55949922996045e-06,
"loss": 0.388,
"step": 345
},
{
"epoch": 0.66432,
"grad_norm": 0.5458997488021851,
"learning_rate": 9.554896117646357e-06,
"loss": 0.3969,
"step": 346
},
{
"epoch": 0.66624,
"grad_norm": 0.5616030097007751,
"learning_rate": 9.550270199610307e-06,
"loss": 0.4346,
"step": 347
},
{
"epoch": 0.66816,
"grad_norm": 0.5889962911605835,
"learning_rate": 9.54562149901362e-06,
"loss": 0.382,
"step": 348
},
{
"epoch": 0.67008,
"grad_norm": 0.5377645492553711,
"learning_rate": 9.54095003913168e-06,
"loss": 0.3881,
"step": 349
},
{
"epoch": 0.672,
"grad_norm": 0.5386795997619629,
"learning_rate": 9.536255843353832e-06,
"loss": 0.3717,
"step": 350
},
{
"epoch": 0.67392,
"grad_norm": 0.5452522039413452,
"learning_rate": 9.531538935183252e-06,
"loss": 0.3668,
"step": 351
},
{
"epoch": 0.67584,
"grad_norm": 0.5414353609085083,
"learning_rate": 9.526799338236828e-06,
"loss": 0.3198,
"step": 352
},
{
"epoch": 0.67776,
"grad_norm": 0.553281843662262,
"learning_rate": 9.522037076245057e-06,
"loss": 0.3661,
"step": 353
},
{
"epoch": 0.67968,
"grad_norm": 0.5341468453407288,
"learning_rate": 9.517252173051912e-06,
"loss": 0.3963,
"step": 354
},
{
"epoch": 0.6816,
"grad_norm": 0.5651521682739258,
"learning_rate": 9.512444652614728e-06,
"loss": 0.3889,
"step": 355
},
{
"epoch": 0.68352,
"grad_norm": 0.5556850433349609,
"learning_rate": 9.507614539004082e-06,
"loss": 0.3729,
"step": 356
},
{
"epoch": 0.68544,
"grad_norm": 0.5719324350357056,
"learning_rate": 9.502761856403668e-06,
"loss": 0.3598,
"step": 357
},
{
"epoch": 0.68736,
"grad_norm": 0.5571504235267639,
"learning_rate": 9.497886629110187e-06,
"loss": 0.3719,
"step": 358
},
{
"epoch": 0.68928,
"grad_norm": 0.5470439195632935,
"learning_rate": 9.49298888153321e-06,
"loss": 0.3576,
"step": 359
},
{
"epoch": 0.6912,
"grad_norm": 0.5296922922134399,
"learning_rate": 9.488068638195072e-06,
"loss": 0.3481,
"step": 360
},
{
"epoch": 0.69312,
"grad_norm": 0.5079280138015747,
"learning_rate": 9.483125923730731e-06,
"loss": 0.3829,
"step": 361
},
{
"epoch": 0.69504,
"grad_norm": 0.5707730054855347,
"learning_rate": 9.478160762887668e-06,
"loss": 0.3504,
"step": 362
},
{
"epoch": 0.69696,
"grad_norm": 0.532460629940033,
"learning_rate": 9.473173180525737e-06,
"loss": 0.4058,
"step": 363
},
{
"epoch": 0.69888,
"grad_norm": 0.5912449955940247,
"learning_rate": 9.468163201617063e-06,
"loss": 0.4256,
"step": 364
},
{
"epoch": 0.7008,
"grad_norm": 0.546734094619751,
"learning_rate": 9.463130851245899e-06,
"loss": 0.4338,
"step": 365
},
{
"epoch": 0.70272,
"grad_norm": 0.5046254992485046,
"learning_rate": 9.458076154608515e-06,
"loss": 0.3841,
"step": 366
},
{
"epoch": 0.70464,
"grad_norm": 0.6258842945098877,
"learning_rate": 9.452999137013064e-06,
"loss": 0.4257,
"step": 367
},
{
"epoch": 0.70656,
"grad_norm": 0.5526532530784607,
"learning_rate": 9.447899823879456e-06,
"loss": 0.3666,
"step": 368
},
{
"epoch": 0.70848,
"grad_norm": 0.5352730751037598,
"learning_rate": 9.442778240739234e-06,
"loss": 0.4154,
"step": 369
},
{
"epoch": 0.7104,
"grad_norm": 0.5436933636665344,
"learning_rate": 9.437634413235437e-06,
"loss": 0.3953,
"step": 370
},
{
"epoch": 0.71232,
"grad_norm": 0.5149994492530823,
"learning_rate": 9.43246836712249e-06,
"loss": 0.3403,
"step": 371
},
{
"epoch": 0.71424,
"grad_norm": 0.5639749765396118,
"learning_rate": 9.427280128266049e-06,
"loss": 0.3935,
"step": 372
},
{
"epoch": 0.71616,
"grad_norm": 0.5508785843849182,
"learning_rate": 9.4220697226429e-06,
"loss": 0.3713,
"step": 373
},
{
"epoch": 0.71808,
"grad_norm": 0.5676838159561157,
"learning_rate": 9.416837176340807e-06,
"loss": 0.3496,
"step": 374
},
{
"epoch": 0.72,
"grad_norm": 0.5941374897956848,
"learning_rate": 9.411582515558391e-06,
"loss": 0.4433,
"step": 375
},
{
"epoch": 0.72192,
"grad_norm": 0.522333025932312,
"learning_rate": 9.406305766604996e-06,
"loss": 0.3792,
"step": 376
},
{
"epoch": 0.72384,
"grad_norm": 0.5115587115287781,
"learning_rate": 9.401006955900555e-06,
"loss": 0.353,
"step": 377
},
{
"epoch": 0.72576,
"grad_norm": 0.5151971578598022,
"learning_rate": 9.395686109975475e-06,
"loss": 0.412,
"step": 378
},
{
"epoch": 0.72768,
"grad_norm": 0.48902374505996704,
"learning_rate": 9.390343255470471e-06,
"loss": 0.3632,
"step": 379
},
{
"epoch": 0.7296,
"grad_norm": 0.5112143754959106,
"learning_rate": 9.384978419136469e-06,
"loss": 0.3645,
"step": 380
},
{
"epoch": 0.73152,
"grad_norm": 0.5630471706390381,
"learning_rate": 9.37959162783444e-06,
"loss": 0.371,
"step": 381
},
{
"epoch": 0.73344,
"grad_norm": 0.5599665641784668,
"learning_rate": 9.374182908535293e-06,
"loss": 0.3918,
"step": 382
},
{
"epoch": 0.73536,
"grad_norm": 0.5166739821434021,
"learning_rate": 9.368752288319722e-06,
"loss": 0.3897,
"step": 383
},
{
"epoch": 0.73728,
"grad_norm": 0.6125780940055847,
"learning_rate": 9.363299794378072e-06,
"loss": 0.3375,
"step": 384
},
{
"epoch": 0.7392,
"grad_norm": 0.5624171495437622,
"learning_rate": 9.357825454010214e-06,
"loss": 0.4032,
"step": 385
},
{
"epoch": 0.74112,
"grad_norm": 0.5462583899497986,
"learning_rate": 9.352329294625397e-06,
"loss": 0.3703,
"step": 386
},
{
"epoch": 0.74304,
"grad_norm": 0.5165379047393799,
"learning_rate": 9.346811343742115e-06,
"loss": 0.3289,
"step": 387
},
{
"epoch": 0.74496,
"grad_norm": 0.7397446036338806,
"learning_rate": 9.34127162898797e-06,
"loss": 0.4027,
"step": 388
},
{
"epoch": 0.74688,
"grad_norm": 0.49132126569747925,
"learning_rate": 9.335710178099528e-06,
"loss": 0.305,
"step": 389
},
{
"epoch": 0.7488,
"grad_norm": 0.5432937741279602,
"learning_rate": 9.330127018922195e-06,
"loss": 0.3661,
"step": 390
},
{
"epoch": 0.75072,
"grad_norm": 0.5612923502922058,
"learning_rate": 9.324522179410054e-06,
"loss": 0.3685,
"step": 391
},
{
"epoch": 0.75264,
"grad_norm": 0.49139928817749023,
"learning_rate": 9.318895687625752e-06,
"loss": 0.3587,
"step": 392
},
{
"epoch": 0.75456,
"grad_norm": 0.5951716899871826,
"learning_rate": 9.313247571740336e-06,
"loss": 0.4437,
"step": 393
},
{
"epoch": 0.75648,
"grad_norm": 0.49642762541770935,
"learning_rate": 9.307577860033123e-06,
"loss": 0.3694,
"step": 394
},
{
"epoch": 0.7584,
"grad_norm": 0.5680660605430603,
"learning_rate": 9.301886580891563e-06,
"loss": 0.3594,
"step": 395
},
{
"epoch": 0.76032,
"grad_norm": 0.5461047291755676,
"learning_rate": 9.296173762811084e-06,
"loss": 0.3664,
"step": 396
},
{
"epoch": 0.76224,
"grad_norm": 0.496517151594162,
"learning_rate": 9.290439434394965e-06,
"loss": 0.3475,
"step": 397
},
{
"epoch": 0.76416,
"grad_norm": 0.5309035181999207,
"learning_rate": 9.284683624354172e-06,
"loss": 0.408,
"step": 398
},
{
"epoch": 0.76608,
"grad_norm": 0.566498875617981,
"learning_rate": 9.278906361507238e-06,
"loss": 0.3676,
"step": 399
},
{
"epoch": 0.768,
"grad_norm": 0.4777064025402069,
"learning_rate": 9.273107674780102e-06,
"loss": 0.3531,
"step": 400
},
{
"epoch": 0.76992,
"grad_norm": 0.48240482807159424,
"learning_rate": 9.26728759320597e-06,
"loss": 0.3496,
"step": 401
},
{
"epoch": 0.77184,
"grad_norm": 0.6093207001686096,
"learning_rate": 9.261446145925167e-06,
"loss": 0.5171,
"step": 402
},
{
"epoch": 0.77376,
"grad_norm": 0.5653449296951294,
"learning_rate": 9.255583362184998e-06,
"loss": 0.3579,
"step": 403
},
{
"epoch": 0.77568,
"grad_norm": 0.6085588335990906,
"learning_rate": 9.249699271339594e-06,
"loss": 0.3366,
"step": 404
},
{
"epoch": 0.7776,
"grad_norm": 0.48358920216560364,
"learning_rate": 9.243793902849764e-06,
"loss": 0.347,
"step": 405
},
{
"epoch": 0.77952,
"grad_norm": 0.5164780616760254,
"learning_rate": 9.237867286282855e-06,
"loss": 0.3308,
"step": 406
},
{
"epoch": 0.78144,
"grad_norm": 0.5597543716430664,
"learning_rate": 9.231919451312603e-06,
"loss": 0.4055,
"step": 407
},
{
"epoch": 0.78336,
"grad_norm": 0.5345094203948975,
"learning_rate": 9.225950427718974e-06,
"loss": 0.3355,
"step": 408
},
{
"epoch": 0.78528,
"grad_norm": 0.509782075881958,
"learning_rate": 9.21996024538803e-06,
"loss": 0.3937,
"step": 409
},
{
"epoch": 0.7872,
"grad_norm": 0.5282305479049683,
"learning_rate": 9.213948934311767e-06,
"loss": 0.3517,
"step": 410
},
{
"epoch": 0.78912,
"grad_norm": 0.5681894421577454,
"learning_rate": 9.207916524587971e-06,
"loss": 0.418,
"step": 411
},
{
"epoch": 0.79104,
"grad_norm": 0.5215588808059692,
"learning_rate": 9.201863046420065e-06,
"loss": 0.3729,
"step": 412
},
{
"epoch": 0.79296,
"grad_norm": 0.5133134722709656,
"learning_rate": 9.195788530116962e-06,
"loss": 0.4092,
"step": 413
},
{
"epoch": 0.79488,
"grad_norm": 0.4911287724971771,
"learning_rate": 9.189693006092907e-06,
"loss": 0.3365,
"step": 414
},
{
"epoch": 0.7968,
"grad_norm": 0.4739636480808258,
"learning_rate": 9.183576504867328e-06,
"loss": 0.3278,
"step": 415
},
{
"epoch": 0.79872,
"grad_norm": 0.5270048379898071,
"learning_rate": 9.177439057064684e-06,
"loss": 0.3759,
"step": 416
},
{
"epoch": 0.80064,
"grad_norm": 0.5361078381538391,
"learning_rate": 9.171280693414307e-06,
"loss": 0.3979,
"step": 417
},
{
"epoch": 0.80256,
"grad_norm": 0.5522956252098083,
"learning_rate": 9.165101444750259e-06,
"loss": 0.3951,
"step": 418
},
{
"epoch": 0.80448,
"grad_norm": 0.5326669812202454,
"learning_rate": 9.158901342011163e-06,
"loss": 0.4266,
"step": 419
},
{
"epoch": 0.8064,
"grad_norm": 0.5558049082756042,
"learning_rate": 9.152680416240059e-06,
"loss": 0.409,
"step": 420
},
{
"epoch": 0.80832,
"grad_norm": 0.49625036120414734,
"learning_rate": 9.146438698584245e-06,
"loss": 0.364,
"step": 421
},
{
"epoch": 0.81024,
"grad_norm": 0.4756894111633301,
"learning_rate": 9.140176220295118e-06,
"loss": 0.3591,
"step": 422
},
{
"epoch": 0.81216,
"grad_norm": 0.5585949420928955,
"learning_rate": 9.133893012728027e-06,
"loss": 0.3822,
"step": 423
},
{
"epoch": 0.81408,
"grad_norm": 0.5492386221885681,
"learning_rate": 9.1275891073421e-06,
"loss": 0.4289,
"step": 424
},
{
"epoch": 0.816,
"grad_norm": 0.46421384811401367,
"learning_rate": 9.121264535700107e-06,
"loss": 0.3409,
"step": 425
},
{
"epoch": 0.81792,
"grad_norm": 0.48895153403282166,
"learning_rate": 9.114919329468283e-06,
"loss": 0.3312,
"step": 426
},
{
"epoch": 0.81984,
"grad_norm": 0.5185580849647522,
"learning_rate": 9.10855352041618e-06,
"loss": 0.3744,
"step": 427
},
{
"epoch": 0.82176,
"grad_norm": 0.542297899723053,
"learning_rate": 9.102167140416503e-06,
"loss": 0.3754,
"step": 428
},
{
"epoch": 0.82368,
"grad_norm": 0.5272738933563232,
"learning_rate": 9.09576022144496e-06,
"loss": 0.3471,
"step": 429
},
{
"epoch": 0.8256,
"grad_norm": 0.5163342952728271,
"learning_rate": 9.089332795580085e-06,
"loss": 0.3901,
"step": 430
},
{
"epoch": 0.82752,
"grad_norm": 0.5210364460945129,
"learning_rate": 9.082884895003098e-06,
"loss": 0.3161,
"step": 431
},
{
"epoch": 0.82944,
"grad_norm": 0.5409610271453857,
"learning_rate": 9.076416551997721e-06,
"loss": 0.3196,
"step": 432
},
{
"epoch": 0.83136,
"grad_norm": 0.5159373879432678,
"learning_rate": 9.06992779895004e-06,
"loss": 0.4417,
"step": 433
},
{
"epoch": 0.83328,
"grad_norm": 0.5538579821586609,
"learning_rate": 9.063418668348323e-06,
"loss": 0.3612,
"step": 434
},
{
"epoch": 0.8352,
"grad_norm": 0.5332982540130615,
"learning_rate": 9.056889192782865e-06,
"loss": 0.3369,
"step": 435
},
{
"epoch": 0.83712,
"grad_norm": 0.4985886514186859,
"learning_rate": 9.050339404945834e-06,
"loss": 0.3866,
"step": 436
},
{
"epoch": 0.83904,
"grad_norm": 0.5680392384529114,
"learning_rate": 9.04376933763109e-06,
"loss": 0.4326,
"step": 437
},
{
"epoch": 0.84096,
"grad_norm": 0.5285807251930237,
"learning_rate": 9.037179023734036e-06,
"loss": 0.3171,
"step": 438
},
{
"epoch": 0.84288,
"grad_norm": 0.5423184633255005,
"learning_rate": 9.030568496251437e-06,
"loss": 0.3626,
"step": 439
},
{
"epoch": 0.8448,
"grad_norm": 0.47262755036354065,
"learning_rate": 9.023937788281278e-06,
"loss": 0.3698,
"step": 440
},
{
"epoch": 0.84672,
"grad_norm": 0.6246124505996704,
"learning_rate": 9.017286933022573e-06,
"loss": 0.391,
"step": 441
},
{
"epoch": 0.84864,
"grad_norm": 0.48114514350891113,
"learning_rate": 9.01061596377522e-06,
"loss": 0.3324,
"step": 442
},
{
"epoch": 0.85056,
"grad_norm": 0.5323843359947205,
"learning_rate": 9.003924913939816e-06,
"loss": 0.3188,
"step": 443
},
{
"epoch": 0.85248,
"grad_norm": 0.5286255478858948,
"learning_rate": 8.997213817017508e-06,
"loss": 0.3904,
"step": 444
},
{
"epoch": 0.8544,
"grad_norm": 0.5769545435905457,
"learning_rate": 8.990482706609805e-06,
"loss": 0.3693,
"step": 445
},
{
"epoch": 0.85632,
"grad_norm": 0.6176020503044128,
"learning_rate": 8.98373161641843e-06,
"loss": 0.4072,
"step": 446
},
{
"epoch": 0.85824,
"grad_norm": 0.5359836220741272,
"learning_rate": 8.97696058024514e-06,
"loss": 0.4031,
"step": 447
},
{
"epoch": 0.86016,
"grad_norm": 0.5097750425338745,
"learning_rate": 8.970169631991556e-06,
"loss": 0.346,
"step": 448
},
{
"epoch": 0.86208,
"grad_norm": 0.6056803464889526,
"learning_rate": 8.963358805658998e-06,
"loss": 0.4532,
"step": 449
},
{
"epoch": 0.864,
"grad_norm": 0.6663092970848083,
"learning_rate": 8.95652813534831e-06,
"loss": 0.4109,
"step": 450
},
{
"epoch": 0.86592,
"grad_norm": 0.5115459561347961,
"learning_rate": 8.949677655259696e-06,
"loss": 0.3722,
"step": 451
},
{
"epoch": 0.86784,
"grad_norm": 0.5519534945487976,
"learning_rate": 8.942807399692543e-06,
"loss": 0.3417,
"step": 452
},
{
"epoch": 0.86976,
"grad_norm": 0.5295504331588745,
"learning_rate": 8.935917403045251e-06,
"loss": 0.4739,
"step": 453
},
{
"epoch": 0.87168,
"grad_norm": 0.5265952944755554,
"learning_rate": 8.92900769981506e-06,
"loss": 0.3888,
"step": 454
},
{
"epoch": 0.8736,
"grad_norm": 0.49268680810928345,
"learning_rate": 8.92207832459788e-06,
"loss": 0.3282,
"step": 455
},
{
"epoch": 0.87552,
"grad_norm": 0.47371768951416016,
"learning_rate": 8.915129312088112e-06,
"loss": 0.3399,
"step": 456
},
{
"epoch": 0.87744,
"grad_norm": 0.5212621688842773,
"learning_rate": 8.908160697078482e-06,
"loss": 0.3293,
"step": 457
},
{
"epoch": 0.87936,
"grad_norm": 0.5132904052734375,
"learning_rate": 8.901172514459864e-06,
"loss": 0.4236,
"step": 458
},
{
"epoch": 0.88128,
"grad_norm": 0.520341157913208,
"learning_rate": 8.894164799221099e-06,
"loss": 0.3662,
"step": 459
},
{
"epoch": 0.8832,
"grad_norm": 0.46859651803970337,
"learning_rate": 8.88713758644883e-06,
"loss": 0.3082,
"step": 460
},
{
"epoch": 0.88512,
"grad_norm": 0.4973495900630951,
"learning_rate": 8.88009091132732e-06,
"loss": 0.3266,
"step": 461
},
{
"epoch": 0.88704,
"grad_norm": 0.4858028292655945,
"learning_rate": 8.873024809138272e-06,
"loss": 0.3674,
"step": 462
},
{
"epoch": 0.88896,
"grad_norm": 0.5313632488250732,
"learning_rate": 8.86593931526067e-06,
"loss": 0.3886,
"step": 463
},
{
"epoch": 0.89088,
"grad_norm": 0.5169888138771057,
"learning_rate": 8.858834465170576e-06,
"loss": 0.3841,
"step": 464
},
{
"epoch": 0.8928,
"grad_norm": 0.6180973052978516,
"learning_rate": 8.851710294440974e-06,
"loss": 0.4285,
"step": 465
},
{
"epoch": 0.89472,
"grad_norm": 0.5154354572296143,
"learning_rate": 8.84456683874158e-06,
"loss": 0.3799,
"step": 466
},
{
"epoch": 0.89664,
"grad_norm": 0.5949125289916992,
"learning_rate": 8.837404133838667e-06,
"loss": 0.4312,
"step": 467
},
{
"epoch": 0.89856,
"grad_norm": 0.474783718585968,
"learning_rate": 8.83022221559489e-06,
"loss": 0.3052,
"step": 468
},
{
"epoch": 0.90048,
"grad_norm": 0.512101948261261,
"learning_rate": 8.823021119969102e-06,
"loss": 0.4063,
"step": 469
},
{
"epoch": 0.9024,
"grad_norm": 0.5232632756233215,
"learning_rate": 8.815800883016168e-06,
"loss": 0.3656,
"step": 470
},
{
"epoch": 0.90432,
"grad_norm": 0.5467024445533752,
"learning_rate": 8.808561540886796e-06,
"loss": 0.3883,
"step": 471
},
{
"epoch": 0.90624,
"grad_norm": 0.5835142135620117,
"learning_rate": 8.801303129827352e-06,
"loss": 0.4083,
"step": 472
},
{
"epoch": 0.90816,
"grad_norm": 0.4766116738319397,
"learning_rate": 8.794025686179677e-06,
"loss": 0.3274,
"step": 473
},
{
"epoch": 0.91008,
"grad_norm": 0.542975664138794,
"learning_rate": 8.786729246380901e-06,
"loss": 0.3947,
"step": 474
},
{
"epoch": 0.912,
"grad_norm": 0.4831298589706421,
"learning_rate": 8.779413846963267e-06,
"loss": 0.3298,
"step": 475
},
{
"epoch": 0.91392,
"grad_norm": 0.5678033232688904,
"learning_rate": 8.772079524553951e-06,
"loss": 0.3836,
"step": 476
},
{
"epoch": 0.91584,
"grad_norm": 0.5535714626312256,
"learning_rate": 8.764726315874872e-06,
"loss": 0.3503,
"step": 477
},
{
"epoch": 0.91776,
"grad_norm": 0.5608989596366882,
"learning_rate": 8.757354257742501e-06,
"loss": 0.3828,
"step": 478
},
{
"epoch": 0.91968,
"grad_norm": 0.5105602145195007,
"learning_rate": 8.749963387067697e-06,
"loss": 0.364,
"step": 479
},
{
"epoch": 0.9216,
"grad_norm": 0.5499537587165833,
"learning_rate": 8.742553740855507e-06,
"loss": 0.4072,
"step": 480
},
{
"epoch": 0.92352,
"grad_norm": 0.5447258353233337,
"learning_rate": 8.735125356204982e-06,
"loss": 0.3514,
"step": 481
},
{
"epoch": 0.92544,
"grad_norm": 0.5518208146095276,
"learning_rate": 8.727678270308994e-06,
"loss": 0.3982,
"step": 482
},
{
"epoch": 0.92736,
"grad_norm": 0.5743223428726196,
"learning_rate": 8.720212520454054e-06,
"loss": 0.3604,
"step": 483
},
{
"epoch": 0.92928,
"grad_norm": 0.5231637358665466,
"learning_rate": 8.712728144020118e-06,
"loss": 0.3624,
"step": 484
},
{
"epoch": 0.9312,
"grad_norm": 0.49136221408843994,
"learning_rate": 8.705225178480397e-06,
"loss": 0.3443,
"step": 485
},
{
"epoch": 0.93312,
"grad_norm": 0.5141754150390625,
"learning_rate": 8.697703661401187e-06,
"loss": 0.3726,
"step": 486
},
{
"epoch": 0.93504,
"grad_norm": 0.49125832319259644,
"learning_rate": 8.69016363044166e-06,
"loss": 0.3547,
"step": 487
},
{
"epoch": 0.93696,
"grad_norm": 0.5220416784286499,
"learning_rate": 8.682605123353685e-06,
"loss": 0.3646,
"step": 488
},
{
"epoch": 0.93888,
"grad_norm": 0.5303863286972046,
"learning_rate": 8.675028177981643e-06,
"loss": 0.457,
"step": 489
},
{
"epoch": 0.9408,
"grad_norm": 0.5229626893997192,
"learning_rate": 8.66743283226223e-06,
"loss": 0.3945,
"step": 490
},
{
"epoch": 0.94272,
"grad_norm": 0.5447962880134583,
"learning_rate": 8.659819124224266e-06,
"loss": 0.3897,
"step": 491
},
{
"epoch": 0.94464,
"grad_norm": 0.537502646446228,
"learning_rate": 8.652187091988516e-06,
"loss": 0.375,
"step": 492
},
{
"epoch": 0.94656,
"grad_norm": 0.5066471099853516,
"learning_rate": 8.644536773767488e-06,
"loss": 0.3745,
"step": 493
},
{
"epoch": 0.94848,
"grad_norm": 0.5527138113975525,
"learning_rate": 8.636868207865244e-06,
"loss": 0.383,
"step": 494
},
{
"epoch": 0.9504,
"grad_norm": 0.5016779899597168,
"learning_rate": 8.629181432677213e-06,
"loss": 0.3393,
"step": 495
},
{
"epoch": 0.95232,
"grad_norm": 0.5093716979026794,
"learning_rate": 8.621476486689991e-06,
"loss": 0.355,
"step": 496
},
{
"epoch": 0.95424,
"grad_norm": 0.5443761944770813,
"learning_rate": 8.613753408481158e-06,
"loss": 0.3246,
"step": 497
},
{
"epoch": 0.95616,
"grad_norm": 0.5846705436706543,
"learning_rate": 8.606012236719073e-06,
"loss": 0.4005,
"step": 498
},
{
"epoch": 0.95808,
"grad_norm": 0.6433200836181641,
"learning_rate": 8.598253010162693e-06,
"loss": 0.3716,
"step": 499
},
{
"epoch": 0.96,
"grad_norm": 0.5293925404548645,
"learning_rate": 8.590475767661371e-06,
"loss": 0.3403,
"step": 500
},
{
"epoch": 0.96192,
"grad_norm": 0.4840909540653229,
"learning_rate": 8.58268054815466e-06,
"loss": 0.3684,
"step": 501
},
{
"epoch": 0.96384,
"grad_norm": 0.5476824641227722,
"learning_rate": 8.574867390672124e-06,
"loss": 0.3336,
"step": 502
},
{
"epoch": 0.96576,
"grad_norm": 0.5676672458648682,
"learning_rate": 8.567036334333142e-06,
"loss": 0.3874,
"step": 503
},
{
"epoch": 0.96768,
"grad_norm": 0.4576645791530609,
"learning_rate": 8.559187418346703e-06,
"loss": 0.3679,
"step": 504
},
{
"epoch": 0.9696,
"grad_norm": 0.5351736545562744,
"learning_rate": 8.551320682011227e-06,
"loss": 0.3647,
"step": 505
},
{
"epoch": 0.97152,
"grad_norm": 0.5142337083816528,
"learning_rate": 8.543436164714351e-06,
"loss": 0.3222,
"step": 506
},
{
"epoch": 0.97344,
"grad_norm": 0.6015647053718567,
"learning_rate": 8.535533905932739e-06,
"loss": 0.3699,
"step": 507
},
{
"epoch": 0.97536,
"grad_norm": 0.5323939919471741,
"learning_rate": 8.527613945231886e-06,
"loss": 0.4091,
"step": 508
},
{
"epoch": 0.97728,
"grad_norm": 0.5003075003623962,
"learning_rate": 8.519676322265914e-06,
"loss": 0.4124,
"step": 509
},
{
"epoch": 0.9792,
"grad_norm": 0.5380808711051941,
"learning_rate": 8.511721076777388e-06,
"loss": 0.3893,
"step": 510
},
{
"epoch": 0.98112,
"grad_norm": 0.5488377213478088,
"learning_rate": 8.503748248597095e-06,
"loss": 0.3873,
"step": 511
},
{
"epoch": 0.98304,
"grad_norm": 0.4986210763454437,
"learning_rate": 8.495757877643857e-06,
"loss": 0.3279,
"step": 512
},
{
"epoch": 0.98496,
"grad_norm": 0.5107905268669128,
"learning_rate": 8.487750003924341e-06,
"loss": 0.4081,
"step": 513
},
{
"epoch": 0.98688,
"grad_norm": 0.5451659560203552,
"learning_rate": 8.479724667532836e-06,
"loss": 0.3984,
"step": 514
},
{
"epoch": 0.9888,
"grad_norm": 0.4570447504520416,
"learning_rate": 8.471681908651067e-06,
"loss": 0.3763,
"step": 515
},
{
"epoch": 0.99072,
"grad_norm": 0.5440273284912109,
"learning_rate": 8.463621767547998e-06,
"loss": 0.3839,
"step": 516
},
{
"epoch": 0.99264,
"grad_norm": 0.49529963731765747,
"learning_rate": 8.455544284579614e-06,
"loss": 0.384,
"step": 517
},
{
"epoch": 0.99456,
"grad_norm": 0.5988640189170837,
"learning_rate": 8.447449500188731e-06,
"loss": 0.3891,
"step": 518
},
{
"epoch": 0.99648,
"grad_norm": 0.47181957960128784,
"learning_rate": 8.439337454904794e-06,
"loss": 0.3341,
"step": 519
},
{
"epoch": 0.9984,
"grad_norm": 0.4868067800998688,
"learning_rate": 8.43120818934367e-06,
"loss": 0.3884,
"step": 520
},
{
"epoch": 1.00128,
"grad_norm": 0.9753141403198242,
"learning_rate": 8.42306174420744e-06,
"loss": 0.6037,
"step": 521
},
{
"epoch": 1.0032,
"grad_norm": 0.5070787668228149,
"learning_rate": 8.414898160284208e-06,
"loss": 0.3366,
"step": 522
},
{
"epoch": 1.00512,
"grad_norm": 0.5253280401229858,
"learning_rate": 8.406717478447889e-06,
"loss": 0.3195,
"step": 523
},
{
"epoch": 1.00704,
"grad_norm": 0.4662684202194214,
"learning_rate": 8.398519739657997e-06,
"loss": 0.303,
"step": 524
},
{
"epoch": 1.00896,
"grad_norm": 0.448887437582016,
"learning_rate": 8.390304984959455e-06,
"loss": 0.2656,
"step": 525
},
{
"epoch": 1.01088,
"grad_norm": 0.4874585270881653,
"learning_rate": 8.382073255482381e-06,
"loss": 0.272,
"step": 526
},
{
"epoch": 1.0128,
"grad_norm": 0.5062534809112549,
"learning_rate": 8.373824592441884e-06,
"loss": 0.2849,
"step": 527
},
{
"epoch": 1.01472,
"grad_norm": 0.47738486528396606,
"learning_rate": 8.36555903713785e-06,
"loss": 0.2848,
"step": 528
},
{
"epoch": 1.01664,
"grad_norm": 0.5530681610107422,
"learning_rate": 8.357276630954756e-06,
"loss": 0.3689,
"step": 529
},
{
"epoch": 1.01856,
"grad_norm": 0.4321051239967346,
"learning_rate": 8.348977415361435e-06,
"loss": 0.2357,
"step": 530
},
{
"epoch": 1.02048,
"grad_norm": 0.5642885565757751,
"learning_rate": 8.340661431910888e-06,
"loss": 0.398,
"step": 531
},
{
"epoch": 1.0224,
"grad_norm": 0.46742841601371765,
"learning_rate": 8.332328722240072e-06,
"loss": 0.262,
"step": 532
},
{
"epoch": 1.02432,
"grad_norm": 0.5284242630004883,
"learning_rate": 8.323979328069689e-06,
"loss": 0.3623,
"step": 533
},
{
"epoch": 1.02624,
"grad_norm": 0.48360779881477356,
"learning_rate": 8.315613291203977e-06,
"loss": 0.2762,
"step": 534
},
{
"epoch": 1.02816,
"grad_norm": 0.5331019163131714,
"learning_rate": 8.307230653530501e-06,
"loss": 0.3356,
"step": 535
},
{
"epoch": 1.03008,
"grad_norm": 0.4938027858734131,
"learning_rate": 8.298831457019943e-06,
"loss": 0.3037,
"step": 536
},
{
"epoch": 1.032,
"grad_norm": 0.5478887557983398,
"learning_rate": 8.290415743725894e-06,
"loss": 0.3384,
"step": 537
},
{
"epoch": 1.03392,
"grad_norm": 0.5135610699653625,
"learning_rate": 8.28198355578465e-06,
"loss": 0.3704,
"step": 538
},
{
"epoch": 1.03584,
"grad_norm": 0.527226984500885,
"learning_rate": 8.273534935414975e-06,
"loss": 0.332,
"step": 539
},
{
"epoch": 1.03776,
"grad_norm": 0.5213348865509033,
"learning_rate": 8.265069924917925e-06,
"loss": 0.3189,
"step": 540
},
{
"epoch": 1.03968,
"grad_norm": 0.573257327079773,
"learning_rate": 8.256588566676616e-06,
"loss": 0.3714,
"step": 541
},
{
"epoch": 1.0416,
"grad_norm": 0.539148211479187,
"learning_rate": 8.248090903156003e-06,
"loss": 0.345,
"step": 542
},
{
"epoch": 1.04352,
"grad_norm": 0.47169405221939087,
"learning_rate": 8.239576976902694e-06,
"loss": 0.2966,
"step": 543
},
{
"epoch": 1.04544,
"grad_norm": 0.48423540592193604,
"learning_rate": 8.231046830544716e-06,
"loss": 0.3314,
"step": 544
},
{
"epoch": 1.04736,
"grad_norm": 0.492631733417511,
"learning_rate": 8.222500506791305e-06,
"loss": 0.3003,
"step": 545
},
{
"epoch": 1.04928,
"grad_norm": 0.554539680480957,
"learning_rate": 8.213938048432697e-06,
"loss": 0.3447,
"step": 546
},
{
"epoch": 1.0512,
"grad_norm": 2.983254909515381,
"learning_rate": 8.205359498339917e-06,
"loss": 0.5312,
"step": 547
},
{
"epoch": 1.05312,
"grad_norm": 0.5011314749717712,
"learning_rate": 8.196764899464552e-06,
"loss": 0.3261,
"step": 548
},
{
"epoch": 1.05504,
"grad_norm": 0.5131269693374634,
"learning_rate": 8.188154294838542e-06,
"loss": 0.3294,
"step": 549
},
{
"epoch": 1.05696,
"grad_norm": 0.45591896772384644,
"learning_rate": 8.179527727573975e-06,
"loss": 0.253,
"step": 550
},
{
"epoch": 1.05888,
"grad_norm": 0.5419086217880249,
"learning_rate": 8.170885240862854e-06,
"loss": 0.3421,
"step": 551
},
{
"epoch": 1.0608,
"grad_norm": 0.5195319652557373,
"learning_rate": 8.162226877976886e-06,
"loss": 0.3435,
"step": 552
},
{
"epoch": 1.06272,
"grad_norm": 0.4708893597126007,
"learning_rate": 8.153552682267278e-06,
"loss": 0.3076,
"step": 553
},
{
"epoch": 1.06464,
"grad_norm": 0.45166927576065063,
"learning_rate": 8.144862697164499e-06,
"loss": 0.3291,
"step": 554
},
{
"epoch": 1.06656,
"grad_norm": 0.5177391171455383,
"learning_rate": 8.136156966178082e-06,
"loss": 0.3509,
"step": 555
},
{
"epoch": 1.06848,
"grad_norm": 0.5296358466148376,
"learning_rate": 8.127435532896388e-06,
"loss": 0.3583,
"step": 556
},
{
"epoch": 1.0704,
"grad_norm": 0.5676787495613098,
"learning_rate": 8.118698440986405e-06,
"loss": 0.3369,
"step": 557
},
{
"epoch": 1.07232,
"grad_norm": 0.4662168025970459,
"learning_rate": 8.10994573419352e-06,
"loss": 0.2875,
"step": 558
},
{
"epoch": 1.07424,
"grad_norm": 0.5258364677429199,
"learning_rate": 8.101177456341301e-06,
"loss": 0.3498,
"step": 559
},
{
"epoch": 1.07616,
"grad_norm": 0.502865195274353,
"learning_rate": 8.092393651331275e-06,
"loss": 0.3321,
"step": 560
},
{
"epoch": 1.07808,
"grad_norm": 0.5539703965187073,
"learning_rate": 8.083594363142717e-06,
"loss": 0.3131,
"step": 561
},
{
"epoch": 1.08,
"grad_norm": 0.49460116028785706,
"learning_rate": 8.074779635832417e-06,
"loss": 0.3025,
"step": 562
},
{
"epoch": 1.08192,
"grad_norm": 0.49678945541381836,
"learning_rate": 8.065949513534474e-06,
"loss": 0.3528,
"step": 563
},
{
"epoch": 1.08384,
"grad_norm": 0.5165755748748779,
"learning_rate": 8.057104040460062e-06,
"loss": 0.3872,
"step": 564
},
{
"epoch": 1.08576,
"grad_norm": 0.48405030369758606,
"learning_rate": 8.048243260897216e-06,
"loss": 0.335,
"step": 565
},
{
"epoch": 1.08768,
"grad_norm": 0.4738030731678009,
"learning_rate": 8.03936721921061e-06,
"loss": 0.3529,
"step": 566
},
{
"epoch": 1.0896,
"grad_norm": 0.5356886982917786,
"learning_rate": 8.030475959841333e-06,
"loss": 0.4062,
"step": 567
},
{
"epoch": 1.09152,
"grad_norm": 0.48704642057418823,
"learning_rate": 8.021569527306663e-06,
"loss": 0.3071,
"step": 568
},
{
"epoch": 1.09344,
"grad_norm": 0.45638754963874817,
"learning_rate": 8.012647966199851e-06,
"loss": 0.2569,
"step": 569
},
{
"epoch": 1.09536,
"grad_norm": 0.5822261571884155,
"learning_rate": 8.003711321189895e-06,
"loss": 0.3642,
"step": 570
},
{
"epoch": 1.09728,
"grad_norm": 0.4678220748901367,
"learning_rate": 7.994759637021316e-06,
"loss": 0.2663,
"step": 571
},
{
"epoch": 1.0992,
"grad_norm": 0.5074660181999207,
"learning_rate": 7.985792958513932e-06,
"loss": 0.2901,
"step": 572
},
{
"epoch": 1.10112,
"grad_norm": 0.48952677845954895,
"learning_rate": 7.976811330562637e-06,
"loss": 0.2889,
"step": 573
},
{
"epoch": 1.10304,
"grad_norm": 0.5047484040260315,
"learning_rate": 7.967814798137173e-06,
"loss": 0.3839,
"step": 574
},
{
"epoch": 1.10496,
"grad_norm": 0.5316326022148132,
"learning_rate": 7.95880340628191e-06,
"loss": 0.3556,
"step": 575
},
{
"epoch": 1.10688,
"grad_norm": 0.5576931834220886,
"learning_rate": 7.949777200115617e-06,
"loss": 0.3916,
"step": 576
},
{
"epoch": 1.1088,
"grad_norm": 0.5140984058380127,
"learning_rate": 7.940736224831227e-06,
"loss": 0.2913,
"step": 577
},
{
"epoch": 1.11072,
"grad_norm": 0.4851345121860504,
"learning_rate": 7.931680525695634e-06,
"loss": 0.2819,
"step": 578
},
{
"epoch": 1.11264,
"grad_norm": 0.5061103701591492,
"learning_rate": 7.922610148049445e-06,
"loss": 0.3336,
"step": 579
},
{
"epoch": 1.11456,
"grad_norm": 0.5128313302993774,
"learning_rate": 7.913525137306756e-06,
"loss": 0.3259,
"step": 580
},
{
"epoch": 1.11648,
"grad_norm": 0.48406460881233215,
"learning_rate": 7.90442553895494e-06,
"loss": 0.309,
"step": 581
},
{
"epoch": 1.1184,
"grad_norm": 0.4796968698501587,
"learning_rate": 7.895311398554395e-06,
"loss": 0.3456,
"step": 582
},
{
"epoch": 1.12032,
"grad_norm": 0.6090113520622253,
"learning_rate": 7.886182761738339e-06,
"loss": 0.3616,
"step": 583
},
{
"epoch": 1.12224,
"grad_norm": 0.5101168751716614,
"learning_rate": 7.877039674212569e-06,
"loss": 0.2961,
"step": 584
},
{
"epoch": 1.12416,
"grad_norm": 0.5289978384971619,
"learning_rate": 7.86788218175523e-06,
"loss": 0.3847,
"step": 585
},
{
"epoch": 1.12608,
"grad_norm": 0.5492078065872192,
"learning_rate": 7.8587103302166e-06,
"loss": 0.37,
"step": 586
},
{
"epoch": 1.1280000000000001,
"grad_norm": 0.5313044786453247,
"learning_rate": 7.849524165518838e-06,
"loss": 0.3892,
"step": 587
},
{
"epoch": 1.12992,
"grad_norm": 0.5143792629241943,
"learning_rate": 7.84032373365578e-06,
"loss": 0.3275,
"step": 588
},
{
"epoch": 1.13184,
"grad_norm": 0.5050796866416931,
"learning_rate": 7.831109080692687e-06,
"loss": 0.3186,
"step": 589
},
{
"epoch": 1.13376,
"grad_norm": 0.5204614996910095,
"learning_rate": 7.821880252766025e-06,
"loss": 0.3502,
"step": 590
},
{
"epoch": 1.13568,
"grad_norm": 0.4545805752277374,
"learning_rate": 7.812637296083235e-06,
"loss": 0.2672,
"step": 591
},
{
"epoch": 1.1376,
"grad_norm": 0.5239761471748352,
"learning_rate": 7.803380256922495e-06,
"loss": 0.3645,
"step": 592
},
{
"epoch": 1.13952,
"grad_norm": 0.5044740438461304,
"learning_rate": 7.794109181632494e-06,
"loss": 0.3011,
"step": 593
},
{
"epoch": 1.14144,
"grad_norm": 0.49390727281570435,
"learning_rate": 7.784824116632198e-06,
"loss": 0.316,
"step": 594
},
{
"epoch": 1.14336,
"grad_norm": 0.5298884510993958,
"learning_rate": 7.775525108410616e-06,
"loss": 0.3385,
"step": 595
},
{
"epoch": 1.14528,
"grad_norm": 0.49875321984291077,
"learning_rate": 7.76621220352657e-06,
"loss": 0.3214,
"step": 596
},
{
"epoch": 1.1472,
"grad_norm": 0.46446898579597473,
"learning_rate": 7.75688544860846e-06,
"loss": 0.3189,
"step": 597
},
{
"epoch": 1.14912,
"grad_norm": 0.47222620248794556,
"learning_rate": 7.747544890354031e-06,
"loss": 0.3205,
"step": 598
},
{
"epoch": 1.15104,
"grad_norm": 0.5359264016151428,
"learning_rate": 7.73819057553014e-06,
"loss": 0.3443,
"step": 599
},
{
"epoch": 1.15296,
"grad_norm": 0.6120325922966003,
"learning_rate": 7.728822550972523e-06,
"loss": 0.3898,
"step": 600
},
{
"epoch": 1.15488,
"grad_norm": 0.5003960132598877,
"learning_rate": 7.719440863585555e-06,
"loss": 0.2785,
"step": 601
},
{
"epoch": 1.1568,
"grad_norm": 0.5268198847770691,
"learning_rate": 7.710045560342021e-06,
"loss": 0.3765,
"step": 602
},
{
"epoch": 1.15872,
"grad_norm": 0.5251873135566711,
"learning_rate": 7.700636688282878e-06,
"loss": 0.3271,
"step": 603
},
{
"epoch": 1.16064,
"grad_norm": 0.5671840310096741,
"learning_rate": 7.69121429451702e-06,
"loss": 0.3182,
"step": 604
},
{
"epoch": 1.16256,
"grad_norm": 0.4827718436717987,
"learning_rate": 7.681778426221043e-06,
"loss": 0.3176,
"step": 605
},
{
"epoch": 1.16448,
"grad_norm": 0.49680420756340027,
"learning_rate": 7.672329130639007e-06,
"loss": 0.3584,
"step": 606
},
{
"epoch": 1.1663999999999999,
"grad_norm": 0.5488826036453247,
"learning_rate": 7.662866455082201e-06,
"loss": 0.3877,
"step": 607
},
{
"epoch": 1.16832,
"grad_norm": 0.5408145785331726,
"learning_rate": 7.65339044692891e-06,
"loss": 0.359,
"step": 608
},
{
"epoch": 1.17024,
"grad_norm": 0.4843355715274811,
"learning_rate": 7.643901153624163e-06,
"loss": 0.3301,
"step": 609
},
{
"epoch": 1.17216,
"grad_norm": 0.4871354103088379,
"learning_rate": 7.634398622679517e-06,
"loss": 0.3402,
"step": 610
},
{
"epoch": 1.17408,
"grad_norm": 0.5074546337127686,
"learning_rate": 7.624882901672801e-06,
"loss": 0.314,
"step": 611
},
{
"epoch": 1.176,
"grad_norm": 0.5373762249946594,
"learning_rate": 7.615354038247889e-06,
"loss": 0.3844,
"step": 612
},
{
"epoch": 1.17792,
"grad_norm": 0.4979964792728424,
"learning_rate": 7.605812080114453e-06,
"loss": 0.3744,
"step": 613
},
{
"epoch": 1.17984,
"grad_norm": 0.4943200647830963,
"learning_rate": 7.59625707504773e-06,
"loss": 0.3502,
"step": 614
},
{
"epoch": 1.18176,
"grad_norm": 1.943246603012085,
"learning_rate": 7.586689070888284e-06,
"loss": 0.3884,
"step": 615
},
{
"epoch": 1.18368,
"grad_norm": 0.5443961024284363,
"learning_rate": 7.577108115541761e-06,
"loss": 0.3651,
"step": 616
},
{
"epoch": 1.1856,
"grad_norm": 0.7367193698883057,
"learning_rate": 7.567514256978652e-06,
"loss": 0.447,
"step": 617
},
{
"epoch": 1.18752,
"grad_norm": 0.5130341053009033,
"learning_rate": 7.557907543234051e-06,
"loss": 0.3393,
"step": 618
},
{
"epoch": 1.18944,
"grad_norm": 0.4912329614162445,
"learning_rate": 7.54828802240742e-06,
"loss": 0.2882,
"step": 619
},
{
"epoch": 1.19136,
"grad_norm": 0.4751531183719635,
"learning_rate": 7.53865574266234e-06,
"loss": 0.2849,
"step": 620
},
{
"epoch": 1.1932800000000001,
"grad_norm": 0.5178718566894531,
"learning_rate": 7.529010752226277e-06,
"loss": 0.3358,
"step": 621
},
{
"epoch": 1.1952,
"grad_norm": 0.5118333101272583,
"learning_rate": 7.519353099390336e-06,
"loss": 0.3361,
"step": 622
},
{
"epoch": 1.19712,
"grad_norm": 0.4875522553920746,
"learning_rate": 7.509682832509024e-06,
"loss": 0.331,
"step": 623
},
{
"epoch": 1.19904,
"grad_norm": 0.5220014452934265,
"learning_rate": 7.500000000000001e-06,
"loss": 0.3263,
"step": 624
},
{
"epoch": 1.20096,
"grad_norm": 0.4730300009250641,
"learning_rate": 7.490304650343842e-06,
"loss": 0.3026,
"step": 625
},
{
"epoch": 1.20288,
"grad_norm": 0.5186544060707092,
"learning_rate": 7.480596832083795e-06,
"loss": 0.3073,
"step": 626
},
{
"epoch": 1.2048,
"grad_norm": 0.5324330925941467,
"learning_rate": 7.470876593825535e-06,
"loss": 0.3627,
"step": 627
},
{
"epoch": 1.20672,
"grad_norm": 0.48234570026397705,
"learning_rate": 7.461143984236925e-06,
"loss": 0.2999,
"step": 628
},
{
"epoch": 1.20864,
"grad_norm": 0.47809162735939026,
"learning_rate": 7.451399052047764e-06,
"loss": 0.3069,
"step": 629
},
{
"epoch": 1.21056,
"grad_norm": 0.5181688070297241,
"learning_rate": 7.441641846049557e-06,
"loss": 0.2809,
"step": 630
},
{
"epoch": 1.21248,
"grad_norm": 0.5020263195037842,
"learning_rate": 7.431872415095252e-06,
"loss": 0.339,
"step": 631
},
{
"epoch": 1.2144,
"grad_norm": 0.49763748049736023,
"learning_rate": 7.422090808099014e-06,
"loss": 0.3376,
"step": 632
},
{
"epoch": 1.21632,
"grad_norm": 0.4587874710559845,
"learning_rate": 7.412297074035968e-06,
"loss": 0.2821,
"step": 633
},
{
"epoch": 1.21824,
"grad_norm": 0.5550690293312073,
"learning_rate": 7.402491261941958e-06,
"loss": 0.3681,
"step": 634
},
{
"epoch": 1.22016,
"grad_norm": 0.45928066968917847,
"learning_rate": 7.3926734209133e-06,
"loss": 0.2643,
"step": 635
},
{
"epoch": 1.22208,
"grad_norm": 0.5000253915786743,
"learning_rate": 7.382843600106539e-06,
"loss": 0.3381,
"step": 636
},
{
"epoch": 1.224,
"grad_norm": 0.4916457533836365,
"learning_rate": 7.373001848738203e-06,
"loss": 0.302,
"step": 637
},
{
"epoch": 1.22592,
"grad_norm": 0.5953068733215332,
"learning_rate": 7.363148216084548e-06,
"loss": 0.3963,
"step": 638
},
{
"epoch": 1.22784,
"grad_norm": 0.5219518542289734,
"learning_rate": 7.353282751481324e-06,
"loss": 0.3031,
"step": 639
},
{
"epoch": 1.22976,
"grad_norm": 0.5044402480125427,
"learning_rate": 7.343405504323519e-06,
"loss": 0.3325,
"step": 640
},
{
"epoch": 1.2316799999999999,
"grad_norm": 0.5092101693153381,
"learning_rate": 7.333516524065116e-06,
"loss": 0.3424,
"step": 641
},
{
"epoch": 1.2336,
"grad_norm": 0.5135781168937683,
"learning_rate": 7.323615860218844e-06,
"loss": 0.3199,
"step": 642
},
{
"epoch": 1.23552,
"grad_norm": 0.5099557638168335,
"learning_rate": 7.3137035623559274e-06,
"loss": 0.3337,
"step": 643
},
{
"epoch": 1.23744,
"grad_norm": 0.45756766200065613,
"learning_rate": 7.303779680105844e-06,
"loss": 0.239,
"step": 644
},
{
"epoch": 1.23936,
"grad_norm": 0.565156877040863,
"learning_rate": 7.2938442631560714e-06,
"loss": 0.3698,
"step": 645
},
{
"epoch": 1.24128,
"grad_norm": 0.5015484094619751,
"learning_rate": 7.28389736125184e-06,
"loss": 0.342,
"step": 646
},
{
"epoch": 1.2432,
"grad_norm": 0.503028392791748,
"learning_rate": 7.273939024195881e-06,
"loss": 0.3678,
"step": 647
},
{
"epoch": 1.24512,
"grad_norm": 0.5198860764503479,
"learning_rate": 7.263969301848188e-06,
"loss": 0.3673,
"step": 648
},
{
"epoch": 1.24704,
"grad_norm": 0.5108463168144226,
"learning_rate": 7.2539882441257484e-06,
"loss": 0.3423,
"step": 649
},
{
"epoch": 1.24896,
"grad_norm": 0.4973859488964081,
"learning_rate": 7.243995901002312e-06,
"loss": 0.321,
"step": 650
},
{
"epoch": 1.25088,
"grad_norm": 0.563503086566925,
"learning_rate": 7.2339923225081296e-06,
"loss": 0.3281,
"step": 651
},
{
"epoch": 1.2528000000000001,
"grad_norm": 0.4967746436595917,
"learning_rate": 7.223977558729707e-06,
"loss": 0.3174,
"step": 652
},
{
"epoch": 1.25472,
"grad_norm": 0.5172938108444214,
"learning_rate": 7.213951659809553e-06,
"loss": 0.34,
"step": 653
},
{
"epoch": 1.25664,
"grad_norm": 0.5202799439430237,
"learning_rate": 7.203914675945929e-06,
"loss": 0.3215,
"step": 654
},
{
"epoch": 1.2585600000000001,
"grad_norm": 0.5658923983573914,
"learning_rate": 7.193866657392597e-06,
"loss": 0.3792,
"step": 655
},
{
"epoch": 1.26048,
"grad_norm": 0.6044734120368958,
"learning_rate": 7.183807654458565e-06,
"loss": 0.3587,
"step": 656
},
{
"epoch": 1.2624,
"grad_norm": 0.5581307411193848,
"learning_rate": 7.173737717507843e-06,
"loss": 0.3599,
"step": 657
},
{
"epoch": 1.26432,
"grad_norm": 0.5176196098327637,
"learning_rate": 7.163656896959181e-06,
"loss": 0.2892,
"step": 658
},
{
"epoch": 1.26624,
"grad_norm": 0.5845641493797302,
"learning_rate": 7.153565243285829e-06,
"loss": 0.3522,
"step": 659
},
{
"epoch": 1.26816,
"grad_norm": 0.526369035243988,
"learning_rate": 7.143462807015271e-06,
"loss": 0.336,
"step": 660
},
{
"epoch": 1.27008,
"grad_norm": 0.4802703857421875,
"learning_rate": 7.133349638728979e-06,
"loss": 0.3448,
"step": 661
},
{
"epoch": 1.272,
"grad_norm": 0.5456414222717285,
"learning_rate": 7.1232257890621605e-06,
"loss": 0.3376,
"step": 662
},
{
"epoch": 1.27392,
"grad_norm": 0.4875660240650177,
"learning_rate": 7.113091308703498e-06,
"loss": 0.281,
"step": 663
},
{
"epoch": 1.27584,
"grad_norm": 0.47632116079330444,
"learning_rate": 7.102946248394908e-06,
"loss": 0.2764,
"step": 664
},
{
"epoch": 1.27776,
"grad_norm": 0.5013471245765686,
"learning_rate": 7.092790658931274e-06,
"loss": 0.2541,
"step": 665
},
{
"epoch": 1.27968,
"grad_norm": 0.613395631313324,
"learning_rate": 7.082624591160201e-06,
"loss": 0.3573,
"step": 666
},
{
"epoch": 1.2816,
"grad_norm": 0.4976433515548706,
"learning_rate": 7.072448095981751e-06,
"loss": 0.3533,
"step": 667
},
{
"epoch": 1.28352,
"grad_norm": 0.49308544397354126,
"learning_rate": 7.0622612243482035e-06,
"loss": 0.2895,
"step": 668
},
{
"epoch": 1.28544,
"grad_norm": 0.5363961458206177,
"learning_rate": 7.052064027263785e-06,
"loss": 0.2769,
"step": 669
},
{
"epoch": 1.28736,
"grad_norm": 0.5065466165542603,
"learning_rate": 7.041856555784421e-06,
"loss": 0.3052,
"step": 670
},
{
"epoch": 1.28928,
"grad_norm": 0.5964332222938538,
"learning_rate": 7.031638861017484e-06,
"loss": 0.3669,
"step": 671
},
{
"epoch": 1.2912,
"grad_norm": 0.45170000195503235,
"learning_rate": 7.021410994121525e-06,
"loss": 0.2903,
"step": 672
},
{
"epoch": 1.29312,
"grad_norm": 0.5211443305015564,
"learning_rate": 7.011173006306034e-06,
"loss": 0.365,
"step": 673
},
{
"epoch": 1.29504,
"grad_norm": 0.4566373825073242,
"learning_rate": 7.0009249488311685e-06,
"loss": 0.2695,
"step": 674
},
{
"epoch": 1.29696,
"grad_norm": 0.5880781412124634,
"learning_rate": 6.990666873007506e-06,
"loss": 0.3593,
"step": 675
},
{
"epoch": 1.29888,
"grad_norm": 0.5370779037475586,
"learning_rate": 6.980398830195785e-06,
"loss": 0.385,
"step": 676
},
{
"epoch": 1.3008,
"grad_norm": 0.4926360845565796,
"learning_rate": 6.970120871806647e-06,
"loss": 0.3048,
"step": 677
},
{
"epoch": 1.3027199999999999,
"grad_norm": 0.5478925704956055,
"learning_rate": 6.959833049300376e-06,
"loss": 0.3193,
"step": 678
},
{
"epoch": 1.30464,
"grad_norm": 0.5936509966850281,
"learning_rate": 6.949535414186651e-06,
"loss": 0.4452,
"step": 679
},
{
"epoch": 1.30656,
"grad_norm": 0.5100031495094299,
"learning_rate": 6.939228018024275e-06,
"loss": 0.2604,
"step": 680
},
{
"epoch": 1.3084799999999999,
"grad_norm": 0.5690284967422485,
"learning_rate": 6.9289109124209276e-06,
"loss": 0.3934,
"step": 681
},
{
"epoch": 1.3104,
"grad_norm": 0.5513601303100586,
"learning_rate": 6.918584149032899e-06,
"loss": 0.3462,
"step": 682
},
{
"epoch": 1.31232,
"grad_norm": 0.5744508504867554,
"learning_rate": 6.908247779564836e-06,
"loss": 0.3456,
"step": 683
},
{
"epoch": 1.31424,
"grad_norm": 0.520099937915802,
"learning_rate": 6.897901855769483e-06,
"loss": 0.3683,
"step": 684
},
{
"epoch": 1.31616,
"grad_norm": 0.49188148975372314,
"learning_rate": 6.887546429447418e-06,
"loss": 0.2711,
"step": 685
},
{
"epoch": 1.31808,
"grad_norm": 0.5550381541252136,
"learning_rate": 6.8771815524468e-06,
"loss": 0.3845,
"step": 686
},
{
"epoch": 1.32,
"grad_norm": 0.5297709703445435,
"learning_rate": 6.8668072766631054e-06,
"loss": 0.3295,
"step": 687
},
{
"epoch": 1.32192,
"grad_norm": 0.5166546106338501,
"learning_rate": 6.856423654038868e-06,
"loss": 0.3433,
"step": 688
},
{
"epoch": 1.3238400000000001,
"grad_norm": 0.47496888041496277,
"learning_rate": 6.8460307365634225e-06,
"loss": 0.2571,
"step": 689
},
{
"epoch": 1.32576,
"grad_norm": 0.5345900058746338,
"learning_rate": 6.8356285762726385e-06,
"loss": 0.3584,
"step": 690
},
{
"epoch": 1.32768,
"grad_norm": 0.4878763258457184,
"learning_rate": 6.825217225248664e-06,
"loss": 0.2983,
"step": 691
},
{
"epoch": 1.3296000000000001,
"grad_norm": 0.6528189778327942,
"learning_rate": 6.814796735619664e-06,
"loss": 0.4665,
"step": 692
},
{
"epoch": 1.33152,
"grad_norm": 0.4801347255706787,
"learning_rate": 6.804367159559561e-06,
"loss": 0.2949,
"step": 693
},
{
"epoch": 1.33344,
"grad_norm": 0.5306183695793152,
"learning_rate": 6.793928549287767e-06,
"loss": 0.3244,
"step": 694
},
{
"epoch": 1.33536,
"grad_norm": 0.45441702008247375,
"learning_rate": 6.783480957068934e-06,
"loss": 0.266,
"step": 695
},
{
"epoch": 1.33728,
"grad_norm": 0.5313470959663391,
"learning_rate": 6.773024435212678e-06,
"loss": 0.3653,
"step": 696
},
{
"epoch": 1.3392,
"grad_norm": 0.49432191252708435,
"learning_rate": 6.762559036073331e-06,
"loss": 0.3591,
"step": 697
},
{
"epoch": 1.34112,
"grad_norm": 0.47522586584091187,
"learning_rate": 6.75208481204967e-06,
"loss": 0.2978,
"step": 698
},
{
"epoch": 1.34304,
"grad_norm": 0.5183509588241577,
"learning_rate": 6.74160181558465e-06,
"loss": 0.3741,
"step": 699
},
{
"epoch": 1.34496,
"grad_norm": 0.5015780329704285,
"learning_rate": 6.731110099165165e-06,
"loss": 0.3444,
"step": 700
},
{
"epoch": 1.34688,
"grad_norm": 0.5314385294914246,
"learning_rate": 6.7206097153217474e-06,
"loss": 0.3271,
"step": 701
},
{
"epoch": 1.3488,
"grad_norm": 0.480180948972702,
"learning_rate": 6.710100716628345e-06,
"loss": 0.3094,
"step": 702
},
{
"epoch": 1.35072,
"grad_norm": 0.4878726303577423,
"learning_rate": 6.699583155702025e-06,
"loss": 0.3119,
"step": 703
},
{
"epoch": 1.35264,
"grad_norm": 0.5134080052375793,
"learning_rate": 6.689057085202737e-06,
"loss": 0.3064,
"step": 704
},
{
"epoch": 1.35456,
"grad_norm": 0.5401574373245239,
"learning_rate": 6.678522557833025e-06,
"loss": 0.424,
"step": 705
},
{
"epoch": 1.35648,
"grad_norm": 0.4826066493988037,
"learning_rate": 6.667979626337783e-06,
"loss": 0.3243,
"step": 706
},
{
"epoch": 1.3584,
"grad_norm": 0.5199458599090576,
"learning_rate": 6.6574283435039795e-06,
"loss": 0.3421,
"step": 707
},
{
"epoch": 1.36032,
"grad_norm": 0.4895918071269989,
"learning_rate": 6.646868762160399e-06,
"loss": 0.3213,
"step": 708
},
{
"epoch": 1.36224,
"grad_norm": 0.5179689526557922,
"learning_rate": 6.6363009351773755e-06,
"loss": 0.3229,
"step": 709
},
{
"epoch": 1.36416,
"grad_norm": 0.4855785369873047,
"learning_rate": 6.625724915466526e-06,
"loss": 0.3352,
"step": 710
},
{
"epoch": 1.36608,
"grad_norm": 0.48561304807662964,
"learning_rate": 6.615140755980491e-06,
"loss": 0.303,
"step": 711
},
{
"epoch": 1.3679999999999999,
"grad_norm": 0.5176676511764526,
"learning_rate": 6.6045485097126585e-06,
"loss": 0.3592,
"step": 712
},
{
"epoch": 1.36992,
"grad_norm": 0.5202802419662476,
"learning_rate": 6.593948229696915e-06,
"loss": 0.3245,
"step": 713
},
{
"epoch": 1.37184,
"grad_norm": 0.5198225378990173,
"learning_rate": 6.583339969007364e-06,
"loss": 0.3661,
"step": 714
},
{
"epoch": 1.3737599999999999,
"grad_norm": 0.5848777294158936,
"learning_rate": 6.572723780758069e-06,
"loss": 0.3779,
"step": 715
},
{
"epoch": 1.37568,
"grad_norm": 0.4997383654117584,
"learning_rate": 6.562099718102788e-06,
"loss": 0.3156,
"step": 716
},
{
"epoch": 1.3776,
"grad_norm": 0.48901376128196716,
"learning_rate": 6.551467834234702e-06,
"loss": 0.3408,
"step": 717
},
{
"epoch": 1.37952,
"grad_norm": 0.48844999074935913,
"learning_rate": 6.540828182386154e-06,
"loss": 0.3166,
"step": 718
},
{
"epoch": 1.38144,
"grad_norm": 0.5013572573661804,
"learning_rate": 6.530180815828377e-06,
"loss": 0.3298,
"step": 719
},
{
"epoch": 1.38336,
"grad_norm": 0.4457682967185974,
"learning_rate": 6.519525787871235e-06,
"loss": 0.2907,
"step": 720
},
{
"epoch": 1.38528,
"grad_norm": 0.5129805207252502,
"learning_rate": 6.508863151862947e-06,
"loss": 0.3307,
"step": 721
},
{
"epoch": 1.3872,
"grad_norm": 0.4913400709629059,
"learning_rate": 6.498192961189829e-06,
"loss": 0.3587,
"step": 722
},
{
"epoch": 1.3891200000000001,
"grad_norm": 0.5093926787376404,
"learning_rate": 6.487515269276015e-06,
"loss": 0.3104,
"step": 723
},
{
"epoch": 1.39104,
"grad_norm": 0.49991241097450256,
"learning_rate": 6.476830129583207e-06,
"loss": 0.3241,
"step": 724
},
{
"epoch": 1.39296,
"grad_norm": 0.4793432950973511,
"learning_rate": 6.466137595610388e-06,
"loss": 0.3567,
"step": 725
},
{
"epoch": 1.3948800000000001,
"grad_norm": 0.5001140832901001,
"learning_rate": 6.455437720893565e-06,
"loss": 0.341,
"step": 726
},
{
"epoch": 1.3968,
"grad_norm": 0.5276058912277222,
"learning_rate": 6.444730559005504e-06,
"loss": 0.3208,
"step": 727
},
{
"epoch": 1.39872,
"grad_norm": 0.476131409406662,
"learning_rate": 6.434016163555452e-06,
"loss": 0.3054,
"step": 728
},
{
"epoch": 1.40064,
"grad_norm": 0.536666989326477,
"learning_rate": 6.423294588188874e-06,
"loss": 0.343,
"step": 729
},
{
"epoch": 1.40256,
"grad_norm": 0.49306192994117737,
"learning_rate": 6.412565886587186e-06,
"loss": 0.2851,
"step": 730
},
{
"epoch": 1.40448,
"grad_norm": 0.4840988218784332,
"learning_rate": 6.401830112467479e-06,
"loss": 0.3193,
"step": 731
},
{
"epoch": 1.4064,
"grad_norm": 0.5222987532615662,
"learning_rate": 6.391087319582264e-06,
"loss": 0.3306,
"step": 732
},
{
"epoch": 1.40832,
"grad_norm": 0.5462629199028015,
"learning_rate": 6.380337561719184e-06,
"loss": 0.3492,
"step": 733
},
{
"epoch": 1.41024,
"grad_norm": 0.5185136795043945,
"learning_rate": 6.369580892700763e-06,
"loss": 0.3552,
"step": 734
},
{
"epoch": 1.41216,
"grad_norm": 0.5025428533554077,
"learning_rate": 6.358817366384122e-06,
"loss": 0.3339,
"step": 735
},
{
"epoch": 1.41408,
"grad_norm": 0.4475882351398468,
"learning_rate": 6.34804703666072e-06,
"loss": 0.2778,
"step": 736
},
{
"epoch": 1.416,
"grad_norm": 0.5565717220306396,
"learning_rate": 6.337269957456074e-06,
"loss": 0.3663,
"step": 737
},
{
"epoch": 1.41792,
"grad_norm": 0.5087977647781372,
"learning_rate": 6.326486182729504e-06,
"loss": 0.3511,
"step": 738
},
{
"epoch": 1.41984,
"grad_norm": 0.4851036071777344,
"learning_rate": 6.315695766473844e-06,
"loss": 0.3253,
"step": 739
},
{
"epoch": 1.42176,
"grad_norm": 0.5436884164810181,
"learning_rate": 6.304898762715187e-06,
"loss": 0.3685,
"step": 740
},
{
"epoch": 1.42368,
"grad_norm": 0.5147407054901123,
"learning_rate": 6.294095225512604e-06,
"loss": 0.3304,
"step": 741
},
{
"epoch": 1.4256,
"grad_norm": 0.5282286405563354,
"learning_rate": 6.283285208957884e-06,
"loss": 0.4189,
"step": 742
},
{
"epoch": 1.42752,
"grad_norm": 0.4964216649532318,
"learning_rate": 6.272468767175255e-06,
"loss": 0.3076,
"step": 743
},
{
"epoch": 1.42944,
"grad_norm": 0.5682664513587952,
"learning_rate": 6.261645954321109e-06,
"loss": 0.3984,
"step": 744
},
{
"epoch": 1.43136,
"grad_norm": 0.5285937786102295,
"learning_rate": 6.2508168245837476e-06,
"loss": 0.3285,
"step": 745
},
{
"epoch": 1.4332799999999999,
"grad_norm": 0.5045657753944397,
"learning_rate": 6.239981432183093e-06,
"loss": 0.3543,
"step": 746
},
{
"epoch": 1.4352,
"grad_norm": 0.5176907777786255,
"learning_rate": 6.2291398313704265e-06,
"loss": 0.3333,
"step": 747
},
{
"epoch": 1.43712,
"grad_norm": 0.4658200740814209,
"learning_rate": 6.21829207642811e-06,
"loss": 0.2905,
"step": 748
},
{
"epoch": 1.4390399999999999,
"grad_norm": 0.48745599389076233,
"learning_rate": 6.207438221669325e-06,
"loss": 0.3166,
"step": 749
},
{
"epoch": 1.44096,
"grad_norm": 0.508639395236969,
"learning_rate": 6.1965783214377895e-06,
"loss": 0.3332,
"step": 750
},
{
"epoch": 1.44288,
"grad_norm": 0.4614761769771576,
"learning_rate": 6.185712430107489e-06,
"loss": 0.2848,
"step": 751
},
{
"epoch": 1.4447999999999999,
"grad_norm": 0.5315017104148865,
"learning_rate": 6.1748406020824115e-06,
"loss": 0.3897,
"step": 752
},
{
"epoch": 1.44672,
"grad_norm": 0.4943656325340271,
"learning_rate": 6.163962891796261e-06,
"loss": 0.3478,
"step": 753
},
{
"epoch": 1.44864,
"grad_norm": 0.47768330574035645,
"learning_rate": 6.153079353712201e-06,
"loss": 0.3005,
"step": 754
},
{
"epoch": 1.45056,
"grad_norm": 0.5421534776687622,
"learning_rate": 6.142190042322569e-06,
"loss": 0.3967,
"step": 755
},
{
"epoch": 1.45248,
"grad_norm": 0.5950009822845459,
"learning_rate": 6.131295012148613e-06,
"loss": 0.3844,
"step": 756
},
{
"epoch": 1.4544000000000001,
"grad_norm": 0.5095462799072266,
"learning_rate": 6.120394317740205e-06,
"loss": 0.377,
"step": 757
},
{
"epoch": 1.45632,
"grad_norm": 0.5045623779296875,
"learning_rate": 6.1094880136755886e-06,
"loss": 0.3905,
"step": 758
},
{
"epoch": 1.45824,
"grad_norm": 0.49445492029190063,
"learning_rate": 6.0985761545610865e-06,
"loss": 0.2757,
"step": 759
},
{
"epoch": 1.4601600000000001,
"grad_norm": 0.4865478575229645,
"learning_rate": 6.087658795030838e-06,
"loss": 0.3439,
"step": 760
},
{
"epoch": 1.46208,
"grad_norm": 0.5209829211235046,
"learning_rate": 6.076735989746522e-06,
"loss": 0.3429,
"step": 761
},
{
"epoch": 1.464,
"grad_norm": 0.4884992241859436,
"learning_rate": 6.065807793397081e-06,
"loss": 0.3175,
"step": 762
},
{
"epoch": 1.4659200000000001,
"grad_norm": 0.527862012386322,
"learning_rate": 6.0548742606984545e-06,
"loss": 0.3611,
"step": 763
},
{
"epoch": 1.46784,
"grad_norm": 0.5010285973548889,
"learning_rate": 6.043935446393294e-06,
"loss": 0.3894,
"step": 764
},
{
"epoch": 1.46976,
"grad_norm": 0.4742327332496643,
"learning_rate": 6.032991405250702e-06,
"loss": 0.2767,
"step": 765
},
{
"epoch": 1.47168,
"grad_norm": 0.4939432740211487,
"learning_rate": 6.022042192065946e-06,
"loss": 0.3158,
"step": 766
},
{
"epoch": 1.4736,
"grad_norm": 0.5018529891967773,
"learning_rate": 6.011087861660191e-06,
"loss": 0.3518,
"step": 767
},
{
"epoch": 1.47552,
"grad_norm": 0.5207298398017883,
"learning_rate": 6.000128468880223e-06,
"loss": 0.3411,
"step": 768
},
{
"epoch": 1.47744,
"grad_norm": 0.5143241286277771,
"learning_rate": 5.989164068598175e-06,
"loss": 0.3699,
"step": 769
},
{
"epoch": 1.47936,
"grad_norm": 0.4441927969455719,
"learning_rate": 5.978194715711254e-06,
"loss": 0.2616,
"step": 770
},
{
"epoch": 1.48128,
"grad_norm": 0.516179084777832,
"learning_rate": 5.9672204651414564e-06,
"loss": 0.3377,
"step": 771
},
{
"epoch": 1.4832,
"grad_norm": 0.49691465497016907,
"learning_rate": 5.956241371835312e-06,
"loss": 0.3388,
"step": 772
},
{
"epoch": 1.48512,
"grad_norm": 0.5426936745643616,
"learning_rate": 5.945257490763588e-06,
"loss": 0.3341,
"step": 773
},
{
"epoch": 1.48704,
"grad_norm": 0.49718064069747925,
"learning_rate": 5.934268876921026e-06,
"loss": 0.3113,
"step": 774
},
{
"epoch": 1.48896,
"grad_norm": 0.5105945467948914,
"learning_rate": 5.923275585326064e-06,
"loss": 0.3371,
"step": 775
},
{
"epoch": 1.49088,
"grad_norm": 0.46768656373023987,
"learning_rate": 5.912277671020564e-06,
"loss": 0.3048,
"step": 776
},
{
"epoch": 1.4928,
"grad_norm": 0.48849251866340637,
"learning_rate": 5.90127518906953e-06,
"loss": 0.3069,
"step": 777
},
{
"epoch": 1.49472,
"grad_norm": 0.5047224760055542,
"learning_rate": 5.890268194560834e-06,
"loss": 0.3378,
"step": 778
},
{
"epoch": 1.49664,
"grad_norm": 0.5166555047035217,
"learning_rate": 5.8792567426049465e-06,
"loss": 0.4027,
"step": 779
},
{
"epoch": 1.49856,
"grad_norm": 0.5011685490608215,
"learning_rate": 5.8682408883346535e-06,
"loss": 0.3314,
"step": 780
},
{
"epoch": 1.50048,
"grad_norm": 0.4982200860977173,
"learning_rate": 5.857220686904779e-06,
"loss": 0.3168,
"step": 781
},
{
"epoch": 1.5024,
"grad_norm": 0.5978631377220154,
"learning_rate": 5.846196193491919e-06,
"loss": 0.3655,
"step": 782
},
{
"epoch": 1.5043199999999999,
"grad_norm": 0.5696616768836975,
"learning_rate": 5.835167463294155e-06,
"loss": 0.4043,
"step": 783
},
{
"epoch": 1.50624,
"grad_norm": 0.5210936069488525,
"learning_rate": 5.824134551530783e-06,
"loss": 0.3152,
"step": 784
},
{
"epoch": 1.50816,
"grad_norm": 0.5195565819740295,
"learning_rate": 5.813097513442035e-06,
"loss": 0.3608,
"step": 785
},
{
"epoch": 1.5100799999999999,
"grad_norm": 0.499337762594223,
"learning_rate": 5.8020564042888015e-06,
"loss": 0.3502,
"step": 786
},
{
"epoch": 1.512,
"grad_norm": 0.48681673407554626,
"learning_rate": 5.79101127935236e-06,
"loss": 0.3028,
"step": 787
},
{
"epoch": 1.5139200000000002,
"grad_norm": 0.4867977499961853,
"learning_rate": 5.77996219393409e-06,
"loss": 0.318,
"step": 788
},
{
"epoch": 1.5158399999999999,
"grad_norm": 0.48680126667022705,
"learning_rate": 5.768909203355203e-06,
"loss": 0.3408,
"step": 789
},
{
"epoch": 1.51776,
"grad_norm": 0.47579270601272583,
"learning_rate": 5.757852362956463e-06,
"loss": 0.2996,
"step": 790
},
{
"epoch": 1.5196800000000001,
"grad_norm": 0.48116764426231384,
"learning_rate": 5.7467917280979105e-06,
"loss": 0.3339,
"step": 791
},
{
"epoch": 1.5215999999999998,
"grad_norm": 0.5041956305503845,
"learning_rate": 5.735727354158581e-06,
"loss": 0.3355,
"step": 792
},
{
"epoch": 1.52352,
"grad_norm": 0.5426154732704163,
"learning_rate": 5.724659296536234e-06,
"loss": 0.3773,
"step": 793
},
{
"epoch": 1.5254400000000001,
"grad_norm": 0.45001599192619324,
"learning_rate": 5.713587610647073e-06,
"loss": 0.3112,
"step": 794
},
{
"epoch": 1.52736,
"grad_norm": 0.46268171072006226,
"learning_rate": 5.7025123519254644e-06,
"loss": 0.3198,
"step": 795
},
{
"epoch": 1.52928,
"grad_norm": 0.475282222032547,
"learning_rate": 5.6914335758236665e-06,
"loss": 0.3287,
"step": 796
},
{
"epoch": 1.5312000000000001,
"grad_norm": 0.49156060814857483,
"learning_rate": 5.680351337811547e-06,
"loss": 0.3359,
"step": 797
},
{
"epoch": 1.53312,
"grad_norm": 0.47198373079299927,
"learning_rate": 5.669265693376309e-06,
"loss": 0.3014,
"step": 798
},
{
"epoch": 1.53504,
"grad_norm": 0.5667093396186829,
"learning_rate": 5.658176698022208e-06,
"loss": 0.4129,
"step": 799
},
{
"epoch": 1.53696,
"grad_norm": 0.452726274728775,
"learning_rate": 5.647084407270277e-06,
"loss": 0.2851,
"step": 800
},
{
"epoch": 1.53888,
"grad_norm": 0.4933633506298065,
"learning_rate": 5.6359888766580555e-06,
"loss": 0.3302,
"step": 801
},
{
"epoch": 1.5408,
"grad_norm": 0.49544256925582886,
"learning_rate": 5.624890161739292e-06,
"loss": 0.3597,
"step": 802
},
{
"epoch": 1.54272,
"grad_norm": 0.4835258424282074,
"learning_rate": 5.6137883180836925e-06,
"loss": 0.3273,
"step": 803
},
{
"epoch": 1.54464,
"grad_norm": 0.5229555368423462,
"learning_rate": 5.6026834012766155e-06,
"loss": 0.3573,
"step": 804
},
{
"epoch": 1.54656,
"grad_norm": 0.4861273169517517,
"learning_rate": 5.591575466918816e-06,
"loss": 0.365,
"step": 805
},
{
"epoch": 1.54848,
"grad_norm": 0.49933966994285583,
"learning_rate": 5.5804645706261515e-06,
"loss": 0.3113,
"step": 806
},
{
"epoch": 1.5504,
"grad_norm": 0.5060083866119385,
"learning_rate": 5.569350768029312e-06,
"loss": 0.3359,
"step": 807
},
{
"epoch": 1.55232,
"grad_norm": 0.5194623470306396,
"learning_rate": 5.5582341147735396e-06,
"loss": 0.339,
"step": 808
},
{
"epoch": 1.55424,
"grad_norm": 0.4615398347377777,
"learning_rate": 5.5471146665183455e-06,
"loss": 0.2599,
"step": 809
},
{
"epoch": 1.55616,
"grad_norm": 0.503343939781189,
"learning_rate": 5.53599247893724e-06,
"loss": 0.3092,
"step": 810
},
{
"epoch": 1.55808,
"grad_norm": 0.5048611760139465,
"learning_rate": 5.524867607717445e-06,
"loss": 0.318,
"step": 811
},
{
"epoch": 1.56,
"grad_norm": 0.43579375743865967,
"learning_rate": 5.5137401085596224e-06,
"loss": 0.2755,
"step": 812
},
{
"epoch": 1.56192,
"grad_norm": 0.4704350233078003,
"learning_rate": 5.502610037177586e-06,
"loss": 0.2999,
"step": 813
},
{
"epoch": 1.56384,
"grad_norm": 0.5115846991539001,
"learning_rate": 5.491477449298036e-06,
"loss": 0.3026,
"step": 814
},
{
"epoch": 1.56576,
"grad_norm": 0.5706815123558044,
"learning_rate": 5.480342400660268e-06,
"loss": 0.4041,
"step": 815
},
{
"epoch": 1.56768,
"grad_norm": 0.47845515608787537,
"learning_rate": 5.469204947015897e-06,
"loss": 0.3313,
"step": 816
},
{
"epoch": 1.5695999999999999,
"grad_norm": 0.5432759523391724,
"learning_rate": 5.458065144128584e-06,
"loss": 0.3293,
"step": 817
},
{
"epoch": 1.57152,
"grad_norm": 0.4986203908920288,
"learning_rate": 5.4469230477737466e-06,
"loss": 0.3109,
"step": 818
},
{
"epoch": 1.57344,
"grad_norm": 0.5080354809761047,
"learning_rate": 5.435778713738292e-06,
"loss": 0.3655,
"step": 819
},
{
"epoch": 1.5753599999999999,
"grad_norm": 0.489967405796051,
"learning_rate": 5.424632197820325e-06,
"loss": 0.2948,
"step": 820
},
{
"epoch": 1.57728,
"grad_norm": 0.47338807582855225,
"learning_rate": 5.413483555828879e-06,
"loss": 0.2971,
"step": 821
},
{
"epoch": 1.5792000000000002,
"grad_norm": 0.48291516304016113,
"learning_rate": 5.402332843583631e-06,
"loss": 0.3365,
"step": 822
},
{
"epoch": 1.5811199999999999,
"grad_norm": 0.518580973148346,
"learning_rate": 5.391180116914621e-06,
"loss": 0.3435,
"step": 823
},
{
"epoch": 1.58304,
"grad_norm": 0.5276509523391724,
"learning_rate": 5.380025431661981e-06,
"loss": 0.2984,
"step": 824
},
{
"epoch": 1.5849600000000001,
"grad_norm": 0.5257059931755066,
"learning_rate": 5.368868843675642e-06,
"loss": 0.3113,
"step": 825
},
{
"epoch": 1.5868799999999998,
"grad_norm": 0.4884243309497833,
"learning_rate": 5.3577104088150685e-06,
"loss": 0.2926,
"step": 826
},
{
"epoch": 1.5888,
"grad_norm": 0.5111277103424072,
"learning_rate": 5.346550182948966e-06,
"loss": 0.3282,
"step": 827
},
{
"epoch": 1.5907200000000001,
"grad_norm": 0.5008331537246704,
"learning_rate": 5.335388221955012e-06,
"loss": 0.3563,
"step": 828
},
{
"epoch": 1.5926399999999998,
"grad_norm": 0.48651450872421265,
"learning_rate": 5.3242245817195705e-06,
"loss": 0.3,
"step": 829
},
{
"epoch": 1.59456,
"grad_norm": 0.5195477604866028,
"learning_rate": 5.31305931813741e-06,
"loss": 0.3585,
"step": 830
},
{
"epoch": 1.5964800000000001,
"grad_norm": 0.5778400301933289,
"learning_rate": 5.301892487111431e-06,
"loss": 0.3688,
"step": 831
},
{
"epoch": 1.5984,
"grad_norm": 0.6022984385490417,
"learning_rate": 5.290724144552379e-06,
"loss": 0.3659,
"step": 832
},
{
"epoch": 1.60032,
"grad_norm": 0.5121864676475525,
"learning_rate": 5.279554346378572e-06,
"loss": 0.3284,
"step": 833
},
{
"epoch": 1.60224,
"grad_norm": 0.561998188495636,
"learning_rate": 5.268383148515608e-06,
"loss": 0.3142,
"step": 834
},
{
"epoch": 1.60416,
"grad_norm": 0.5142027139663696,
"learning_rate": 5.257210606896102e-06,
"loss": 0.3209,
"step": 835
},
{
"epoch": 1.60608,
"grad_norm": 0.49522408843040466,
"learning_rate": 5.246036777459391e-06,
"loss": 0.293,
"step": 836
},
{
"epoch": 1.608,
"grad_norm": 0.5389915108680725,
"learning_rate": 5.234861716151264e-06,
"loss": 0.3894,
"step": 837
},
{
"epoch": 1.60992,
"grad_norm": 0.5348629355430603,
"learning_rate": 5.223685478923671e-06,
"loss": 0.3812,
"step": 838
},
{
"epoch": 1.61184,
"grad_norm": 0.5028142333030701,
"learning_rate": 5.2125081217344595e-06,
"loss": 0.3477,
"step": 839
},
{
"epoch": 1.61376,
"grad_norm": 0.5176277756690979,
"learning_rate": 5.201329700547077e-06,
"loss": 0.3298,
"step": 840
},
{
"epoch": 1.61568,
"grad_norm": 0.46566569805145264,
"learning_rate": 5.190150271330298e-06,
"loss": 0.3033,
"step": 841
},
{
"epoch": 1.6176,
"grad_norm": 0.5451703667640686,
"learning_rate": 5.178969890057953e-06,
"loss": 0.381,
"step": 842
},
{
"epoch": 1.61952,
"grad_norm": 0.4964999854564667,
"learning_rate": 5.167788612708627e-06,
"loss": 0.3233,
"step": 843
},
{
"epoch": 1.62144,
"grad_norm": 0.5038464665412903,
"learning_rate": 5.156606495265402e-06,
"loss": 0.3526,
"step": 844
},
{
"epoch": 1.62336,
"grad_norm": 0.4706322252750397,
"learning_rate": 5.145423593715558e-06,
"loss": 0.276,
"step": 845
},
{
"epoch": 1.62528,
"grad_norm": 0.49024492502212524,
"learning_rate": 5.1342399640503074e-06,
"loss": 0.3057,
"step": 846
},
{
"epoch": 1.6272,
"grad_norm": 0.5155479311943054,
"learning_rate": 5.1230556622645026e-06,
"loss": 0.3417,
"step": 847
},
{
"epoch": 1.62912,
"grad_norm": 0.5265889167785645,
"learning_rate": 5.111870744356366e-06,
"loss": 0.2967,
"step": 848
},
{
"epoch": 1.63104,
"grad_norm": 0.4517934024333954,
"learning_rate": 5.100685266327202e-06,
"loss": 0.3075,
"step": 849
},
{
"epoch": 1.63296,
"grad_norm": 0.4820931553840637,
"learning_rate": 5.0894992841811216e-06,
"loss": 0.3446,
"step": 850
},
{
"epoch": 1.6348799999999999,
"grad_norm": 0.5165117979049683,
"learning_rate": 5.0783128539247585e-06,
"loss": 0.3333,
"step": 851
},
{
"epoch": 1.6368,
"grad_norm": 0.5351890921592712,
"learning_rate": 5.067126031566988e-06,
"loss": 0.2992,
"step": 852
},
{
"epoch": 1.63872,
"grad_norm": 0.5077692866325378,
"learning_rate": 5.055938873118653e-06,
"loss": 0.3155,
"step": 853
},
{
"epoch": 1.6406399999999999,
"grad_norm": 0.5138870477676392,
"learning_rate": 5.044751434592274e-06,
"loss": 0.3291,
"step": 854
},
{
"epoch": 1.64256,
"grad_norm": 0.4901105463504791,
"learning_rate": 5.033563772001782e-06,
"loss": 0.3474,
"step": 855
},
{
"epoch": 1.6444800000000002,
"grad_norm": 0.4643681049346924,
"learning_rate": 5.022375941362218e-06,
"loss": 0.2766,
"step": 856
},
{
"epoch": 1.6463999999999999,
"grad_norm": 0.5424699187278748,
"learning_rate": 5.011187998689474e-06,
"loss": 0.3218,
"step": 857
},
{
"epoch": 1.64832,
"grad_norm": 0.5369173288345337,
"learning_rate": 5e-06,
"loss": 0.3348,
"step": 858
},
{
"epoch": 1.6502400000000002,
"grad_norm": 0.6143074035644531,
"learning_rate": 4.988812001310528e-06,
"loss": 0.412,
"step": 859
},
{
"epoch": 1.6521599999999999,
"grad_norm": 0.5159322619438171,
"learning_rate": 4.977624058637783e-06,
"loss": 0.316,
"step": 860
},
{
"epoch": 1.65408,
"grad_norm": 0.5611573457717896,
"learning_rate": 4.9664362279982205e-06,
"loss": 0.3107,
"step": 861
},
{
"epoch": 1.6560000000000001,
"grad_norm": 0.5152172446250916,
"learning_rate": 4.955248565407727e-06,
"loss": 0.3531,
"step": 862
},
{
"epoch": 1.6579199999999998,
"grad_norm": 0.6682543754577637,
"learning_rate": 4.944061126881348e-06,
"loss": 0.3984,
"step": 863
},
{
"epoch": 1.65984,
"grad_norm": 0.5073124766349792,
"learning_rate": 4.932873968433014e-06,
"loss": 0.3208,
"step": 864
},
{
"epoch": 1.6617600000000001,
"grad_norm": 0.5391241312026978,
"learning_rate": 4.921687146075244e-06,
"loss": 0.3505,
"step": 865
},
{
"epoch": 1.66368,
"grad_norm": 0.49559295177459717,
"learning_rate": 4.910500715818879e-06,
"loss": 0.3207,
"step": 866
},
{
"epoch": 1.6656,
"grad_norm": 0.5040885806083679,
"learning_rate": 4.8993147336728e-06,
"loss": 0.3399,
"step": 867
},
{
"epoch": 1.6675200000000001,
"grad_norm": 0.541750967502594,
"learning_rate": 4.8881292556436355e-06,
"loss": 0.3666,
"step": 868
},
{
"epoch": 1.66944,
"grad_norm": 0.46893709897994995,
"learning_rate": 4.876944337735499e-06,
"loss": 0.273,
"step": 869
},
{
"epoch": 1.67136,
"grad_norm": 0.5051565170288086,
"learning_rate": 4.865760035949695e-06,
"loss": 0.2681,
"step": 870
},
{
"epoch": 1.67328,
"grad_norm": 0.5563918352127075,
"learning_rate": 4.854576406284443e-06,
"loss": 0.3398,
"step": 871
},
{
"epoch": 1.6752,
"grad_norm": 0.5165048241615295,
"learning_rate": 4.8433935047346e-06,
"loss": 0.3245,
"step": 872
},
{
"epoch": 1.67712,
"grad_norm": 0.5090420246124268,
"learning_rate": 4.832211387291374e-06,
"loss": 0.3112,
"step": 873
},
{
"epoch": 1.67904,
"grad_norm": 0.5153842568397522,
"learning_rate": 4.821030109942048e-06,
"loss": 0.3131,
"step": 874
},
{
"epoch": 1.68096,
"grad_norm": 0.540823221206665,
"learning_rate": 4.8098497286697024e-06,
"loss": 0.3351,
"step": 875
},
{
"epoch": 1.68288,
"grad_norm": 0.5578463077545166,
"learning_rate": 4.798670299452926e-06,
"loss": 0.3676,
"step": 876
},
{
"epoch": 1.6848,
"grad_norm": 0.517647922039032,
"learning_rate": 4.787491878265542e-06,
"loss": 0.3381,
"step": 877
},
{
"epoch": 1.68672,
"grad_norm": 0.5597872138023376,
"learning_rate": 4.77631452107633e-06,
"loss": 0.3375,
"step": 878
},
{
"epoch": 1.68864,
"grad_norm": 0.5610257983207703,
"learning_rate": 4.765138283848739e-06,
"loss": 0.3431,
"step": 879
},
{
"epoch": 1.69056,
"grad_norm": 0.4696837067604065,
"learning_rate": 4.75396322254061e-06,
"loss": 0.2771,
"step": 880
},
{
"epoch": 1.69248,
"grad_norm": 0.45718705654144287,
"learning_rate": 4.742789393103899e-06,
"loss": 0.2912,
"step": 881
},
{
"epoch": 1.6944,
"grad_norm": 0.5467259287834167,
"learning_rate": 4.731616851484392e-06,
"loss": 0.3504,
"step": 882
},
{
"epoch": 1.69632,
"grad_norm": 0.5008785724639893,
"learning_rate": 4.720445653621429e-06,
"loss": 0.3022,
"step": 883
},
{
"epoch": 1.69824,
"grad_norm": 0.5096748471260071,
"learning_rate": 4.7092758554476215e-06,
"loss": 0.3174,
"step": 884
},
{
"epoch": 1.70016,
"grad_norm": 0.4579527676105499,
"learning_rate": 4.69810751288857e-06,
"loss": 0.2696,
"step": 885
},
{
"epoch": 1.70208,
"grad_norm": 0.5174343585968018,
"learning_rate": 4.686940681862591e-06,
"loss": 0.3524,
"step": 886
},
{
"epoch": 1.704,
"grad_norm": 0.5016630291938782,
"learning_rate": 4.675775418280432e-06,
"loss": 0.331,
"step": 887
},
{
"epoch": 1.7059199999999999,
"grad_norm": 0.5359956622123718,
"learning_rate": 4.664611778044988e-06,
"loss": 0.3815,
"step": 888
},
{
"epoch": 1.70784,
"grad_norm": 0.5070806741714478,
"learning_rate": 4.653449817051035e-06,
"loss": 0.3678,
"step": 889
},
{
"epoch": 1.70976,
"grad_norm": 0.47846880555152893,
"learning_rate": 4.642289591184934e-06,
"loss": 0.3491,
"step": 890
},
{
"epoch": 1.7116799999999999,
"grad_norm": 0.49447137117385864,
"learning_rate": 4.631131156324359e-06,
"loss": 0.3313,
"step": 891
},
{
"epoch": 1.7136,
"grad_norm": 0.4946858584880829,
"learning_rate": 4.619974568338021e-06,
"loss": 0.3325,
"step": 892
},
{
"epoch": 1.7155200000000002,
"grad_norm": 0.44752758741378784,
"learning_rate": 4.60881988308538e-06,
"loss": 0.3232,
"step": 893
},
{
"epoch": 1.7174399999999999,
"grad_norm": 0.5172149538993835,
"learning_rate": 4.597667156416371e-06,
"loss": 0.3322,
"step": 894
},
{
"epoch": 1.71936,
"grad_norm": 0.5213111042976379,
"learning_rate": 4.586516444171123e-06,
"loss": 0.32,
"step": 895
},
{
"epoch": 1.7212800000000001,
"grad_norm": 0.47677525877952576,
"learning_rate": 4.575367802179675e-06,
"loss": 0.339,
"step": 896
},
{
"epoch": 1.7231999999999998,
"grad_norm": 0.48992615938186646,
"learning_rate": 4.564221286261709e-06,
"loss": 0.2849,
"step": 897
},
{
"epoch": 1.72512,
"grad_norm": 0.48835301399230957,
"learning_rate": 4.553076952226255e-06,
"loss": 0.3077,
"step": 898
},
{
"epoch": 1.7270400000000001,
"grad_norm": 0.5285360813140869,
"learning_rate": 4.541934855871417e-06,
"loss": 0.3675,
"step": 899
},
{
"epoch": 1.72896,
"grad_norm": 0.5021538734436035,
"learning_rate": 4.530795052984104e-06,
"loss": 0.3431,
"step": 900
},
{
"epoch": 1.73088,
"grad_norm": 0.49882856011390686,
"learning_rate": 4.519657599339735e-06,
"loss": 0.3714,
"step": 901
},
{
"epoch": 1.7328000000000001,
"grad_norm": 0.5149997472763062,
"learning_rate": 4.508522550701965e-06,
"loss": 0.3755,
"step": 902
},
{
"epoch": 1.73472,
"grad_norm": 0.4590727984905243,
"learning_rate": 4.497389962822416e-06,
"loss": 0.3036,
"step": 903
},
{
"epoch": 1.73664,
"grad_norm": 0.48678267002105713,
"learning_rate": 4.48625989144038e-06,
"loss": 0.3441,
"step": 904
},
{
"epoch": 1.73856,
"grad_norm": 0.535102128982544,
"learning_rate": 4.475132392282556e-06,
"loss": 0.3851,
"step": 905
},
{
"epoch": 1.74048,
"grad_norm": 0.4970477223396301,
"learning_rate": 4.464007521062761e-06,
"loss": 0.3555,
"step": 906
},
{
"epoch": 1.7424,
"grad_norm": 0.5122007131576538,
"learning_rate": 4.452885333481657e-06,
"loss": 0.3503,
"step": 907
},
{
"epoch": 1.74432,
"grad_norm": 0.4928925931453705,
"learning_rate": 4.441765885226462e-06,
"loss": 0.2831,
"step": 908
},
{
"epoch": 1.74624,
"grad_norm": 0.4829672574996948,
"learning_rate": 4.4306492319706895e-06,
"loss": 0.3522,
"step": 909
},
{
"epoch": 1.74816,
"grad_norm": 0.4782569408416748,
"learning_rate": 4.4195354293738484e-06,
"loss": 0.3078,
"step": 910
},
{
"epoch": 1.75008,
"grad_norm": 0.4939521551132202,
"learning_rate": 4.4084245330811855e-06,
"loss": 0.3467,
"step": 911
},
{
"epoch": 1.752,
"grad_norm": 0.4371122121810913,
"learning_rate": 4.397316598723385e-06,
"loss": 0.2529,
"step": 912
},
{
"epoch": 1.75392,
"grad_norm": 0.4719119071960449,
"learning_rate": 4.386211681916309e-06,
"loss": 0.3452,
"step": 913
},
{
"epoch": 1.75584,
"grad_norm": 0.5364863276481628,
"learning_rate": 4.3751098382607084e-06,
"loss": 0.4366,
"step": 914
},
{
"epoch": 1.75776,
"grad_norm": 0.47348180413246155,
"learning_rate": 4.364011123341947e-06,
"loss": 0.3212,
"step": 915
},
{
"epoch": 1.75968,
"grad_norm": 0.5132121443748474,
"learning_rate": 4.352915592729723e-06,
"loss": 0.3658,
"step": 916
},
{
"epoch": 1.7616,
"grad_norm": 0.5088778734207153,
"learning_rate": 4.341823301977794e-06,
"loss": 0.3661,
"step": 917
},
{
"epoch": 1.76352,
"grad_norm": 0.5200909972190857,
"learning_rate": 4.330734306623694e-06,
"loss": 0.3499,
"step": 918
},
{
"epoch": 1.76544,
"grad_norm": 0.47741684317588806,
"learning_rate": 4.319648662188453e-06,
"loss": 0.3263,
"step": 919
},
{
"epoch": 1.76736,
"grad_norm": 0.5078963041305542,
"learning_rate": 4.308566424176336e-06,
"loss": 0.349,
"step": 920
},
{
"epoch": 1.76928,
"grad_norm": 0.5128167271614075,
"learning_rate": 4.297487648074538e-06,
"loss": 0.3324,
"step": 921
},
{
"epoch": 1.7711999999999999,
"grad_norm": 0.46648016571998596,
"learning_rate": 4.286412389352929e-06,
"loss": 0.3143,
"step": 922
},
{
"epoch": 1.77312,
"grad_norm": 0.44232723116874695,
"learning_rate": 4.275340703463767e-06,
"loss": 0.3081,
"step": 923
},
{
"epoch": 1.77504,
"grad_norm": 0.539298951625824,
"learning_rate": 4.264272645841419e-06,
"loss": 0.3853,
"step": 924
},
{
"epoch": 1.7769599999999999,
"grad_norm": 0.4651195704936981,
"learning_rate": 4.253208271902091e-06,
"loss": 0.3121,
"step": 925
},
{
"epoch": 1.77888,
"grad_norm": 0.49035730957984924,
"learning_rate": 4.242147637043539e-06,
"loss": 0.329,
"step": 926
},
{
"epoch": 1.7808000000000002,
"grad_norm": 0.46691808104515076,
"learning_rate": 4.231090796644798e-06,
"loss": 0.3071,
"step": 927
},
{
"epoch": 1.7827199999999999,
"grad_norm": 0.5323026180267334,
"learning_rate": 4.220037806065911e-06,
"loss": 0.367,
"step": 928
},
{
"epoch": 1.78464,
"grad_norm": 0.494438111782074,
"learning_rate": 4.208988720647642e-06,
"loss": 0.3284,
"step": 929
},
{
"epoch": 1.7865600000000001,
"grad_norm": 0.556074321269989,
"learning_rate": 4.1979435957111984e-06,
"loss": 0.3552,
"step": 930
},
{
"epoch": 1.7884799999999998,
"grad_norm": 0.4797959327697754,
"learning_rate": 4.1869024865579664e-06,
"loss": 0.3381,
"step": 931
},
{
"epoch": 1.7904,
"grad_norm": 0.5070111751556396,
"learning_rate": 4.175865448469219e-06,
"loss": 0.3269,
"step": 932
},
{
"epoch": 1.7923200000000001,
"grad_norm": 0.5801342129707336,
"learning_rate": 4.164832536705845e-06,
"loss": 0.3655,
"step": 933
},
{
"epoch": 1.7942399999999998,
"grad_norm": 0.4841829836368561,
"learning_rate": 4.153803806508083e-06,
"loss": 0.3011,
"step": 934
},
{
"epoch": 1.79616,
"grad_norm": 0.4908227026462555,
"learning_rate": 4.142779313095223e-06,
"loss": 0.3575,
"step": 935
},
{
"epoch": 1.7980800000000001,
"grad_norm": 0.5024188756942749,
"learning_rate": 4.131759111665349e-06,
"loss": 0.3397,
"step": 936
},
{
"epoch": 1.8,
"grad_norm": 0.46123698353767395,
"learning_rate": 4.120743257395054e-06,
"loss": 0.258,
"step": 937
},
{
"epoch": 1.80192,
"grad_norm": 0.5350671410560608,
"learning_rate": 4.109731805439168e-06,
"loss": 0.3193,
"step": 938
},
{
"epoch": 1.80384,
"grad_norm": 0.5779839158058167,
"learning_rate": 4.098724810930472e-06,
"loss": 0.4118,
"step": 939
},
{
"epoch": 1.80576,
"grad_norm": 0.4988171458244324,
"learning_rate": 4.087722328979437e-06,
"loss": 0.3342,
"step": 940
},
{
"epoch": 1.80768,
"grad_norm": 0.4945155680179596,
"learning_rate": 4.076724414673937e-06,
"loss": 0.2833,
"step": 941
},
{
"epoch": 1.8096,
"grad_norm": 0.4874011278152466,
"learning_rate": 4.065731123078977e-06,
"loss": 0.2954,
"step": 942
},
{
"epoch": 1.81152,
"grad_norm": 0.5779184103012085,
"learning_rate": 4.054742509236416e-06,
"loss": 0.4105,
"step": 943
},
{
"epoch": 1.81344,
"grad_norm": 0.4962846338748932,
"learning_rate": 4.043758628164688e-06,
"loss": 0.3044,
"step": 944
},
{
"epoch": 1.81536,
"grad_norm": 0.47650501132011414,
"learning_rate": 4.032779534858544e-06,
"loss": 0.3042,
"step": 945
},
{
"epoch": 1.81728,
"grad_norm": 0.4834893047809601,
"learning_rate": 4.021805284288749e-06,
"loss": 0.3028,
"step": 946
},
{
"epoch": 1.8192,
"grad_norm": 0.5058763027191162,
"learning_rate": 4.0108359314018254e-06,
"loss": 0.3154,
"step": 947
},
{
"epoch": 1.82112,
"grad_norm": 0.5589724779129028,
"learning_rate": 3.999871531119779e-06,
"loss": 0.3573,
"step": 948
},
{
"epoch": 1.82304,
"grad_norm": 0.48969992995262146,
"learning_rate": 3.988912138339812e-06,
"loss": 0.3268,
"step": 949
},
{
"epoch": 1.82496,
"grad_norm": 0.5236085653305054,
"learning_rate": 3.977957807934055e-06,
"loss": 0.3479,
"step": 950
},
{
"epoch": 1.82688,
"grad_norm": 0.5456592440605164,
"learning_rate": 3.9670085947493e-06,
"loss": 0.34,
"step": 951
},
{
"epoch": 1.8288,
"grad_norm": 0.5171096324920654,
"learning_rate": 3.956064553606708e-06,
"loss": 0.3041,
"step": 952
},
{
"epoch": 1.83072,
"grad_norm": 0.48784172534942627,
"learning_rate": 3.945125739301547e-06,
"loss": 0.3068,
"step": 953
},
{
"epoch": 1.83264,
"grad_norm": 0.515189528465271,
"learning_rate": 3.934192206602921e-06,
"loss": 0.2903,
"step": 954
},
{
"epoch": 1.83456,
"grad_norm": 0.4961507320404053,
"learning_rate": 3.923264010253479e-06,
"loss": 0.3491,
"step": 955
},
{
"epoch": 1.83648,
"grad_norm": 0.47034502029418945,
"learning_rate": 3.912341204969164e-06,
"loss": 0.3119,
"step": 956
},
{
"epoch": 1.8384,
"grad_norm": 0.48253878951072693,
"learning_rate": 3.901423845438916e-06,
"loss": 0.3263,
"step": 957
},
{
"epoch": 1.84032,
"grad_norm": 0.5012738108634949,
"learning_rate": 3.890511986324413e-06,
"loss": 0.3206,
"step": 958
},
{
"epoch": 1.8422399999999999,
"grad_norm": 0.5565716624259949,
"learning_rate": 3.879605682259797e-06,
"loss": 0.3432,
"step": 959
},
{
"epoch": 1.84416,
"grad_norm": 0.5113378763198853,
"learning_rate": 3.86870498785139e-06,
"loss": 0.3184,
"step": 960
},
{
"epoch": 1.8460800000000002,
"grad_norm": 0.5132129788398743,
"learning_rate": 3.857809957677432e-06,
"loss": 0.3408,
"step": 961
},
{
"epoch": 1.8479999999999999,
"grad_norm": 0.49595579504966736,
"learning_rate": 3.8469206462878e-06,
"loss": 0.2996,
"step": 962
},
{
"epoch": 1.84992,
"grad_norm": 0.5151838064193726,
"learning_rate": 3.83603710820374e-06,
"loss": 0.3846,
"step": 963
},
{
"epoch": 1.8518400000000002,
"grad_norm": 0.5627796053886414,
"learning_rate": 3.825159397917589e-06,
"loss": 0.3802,
"step": 964
},
{
"epoch": 1.8537599999999999,
"grad_norm": 0.4754968285560608,
"learning_rate": 3.814287569892512e-06,
"loss": 0.2748,
"step": 965
},
{
"epoch": 1.85568,
"grad_norm": 0.47414740920066833,
"learning_rate": 3.803421678562213e-06,
"loss": 0.3246,
"step": 966
},
{
"epoch": 1.8576000000000001,
"grad_norm": 0.4961852431297302,
"learning_rate": 3.7925617783306757e-06,
"loss": 0.3303,
"step": 967
},
{
"epoch": 1.8595199999999998,
"grad_norm": 0.5024544596672058,
"learning_rate": 3.781707923571891e-06,
"loss": 0.3682,
"step": 968
},
{
"epoch": 1.86144,
"grad_norm": 0.48516663908958435,
"learning_rate": 3.7708601686295756e-06,
"loss": 0.3285,
"step": 969
},
{
"epoch": 1.8633600000000001,
"grad_norm": 0.4901461899280548,
"learning_rate": 3.7600185678169083e-06,
"loss": 0.3069,
"step": 970
},
{
"epoch": 1.86528,
"grad_norm": 0.5152976512908936,
"learning_rate": 3.7491831754162533e-06,
"loss": 0.3651,
"step": 971
},
{
"epoch": 1.8672,
"grad_norm": 0.45666342973709106,
"learning_rate": 3.7383540456788915e-06,
"loss": 0.2718,
"step": 972
},
{
"epoch": 1.8691200000000001,
"grad_norm": 0.5090336203575134,
"learning_rate": 3.727531232824747e-06,
"loss": 0.366,
"step": 973
},
{
"epoch": 1.87104,
"grad_norm": 0.5222039818763733,
"learning_rate": 3.7167147910421165e-06,
"loss": 0.3465,
"step": 974
},
{
"epoch": 1.87296,
"grad_norm": 0.47645071148872375,
"learning_rate": 3.705904774487396e-06,
"loss": 0.297,
"step": 975
},
{
"epoch": 1.87488,
"grad_norm": 0.5330909490585327,
"learning_rate": 3.695101237284815e-06,
"loss": 0.3437,
"step": 976
},
{
"epoch": 1.8768,
"grad_norm": 0.45454466342926025,
"learning_rate": 3.6843042335261583e-06,
"loss": 0.2789,
"step": 977
},
{
"epoch": 1.87872,
"grad_norm": 0.48672232031822205,
"learning_rate": 3.6735138172704967e-06,
"loss": 0.3424,
"step": 978
},
{
"epoch": 1.88064,
"grad_norm": 0.49056580662727356,
"learning_rate": 3.662730042543926e-06,
"loss": 0.311,
"step": 979
},
{
"epoch": 1.88256,
"grad_norm": 0.48921382427215576,
"learning_rate": 3.6519529633392825e-06,
"loss": 0.3513,
"step": 980
},
{
"epoch": 1.88448,
"grad_norm": 0.494147390127182,
"learning_rate": 3.6411826336158785e-06,
"loss": 0.344,
"step": 981
},
{
"epoch": 1.8864,
"grad_norm": 0.48248007893562317,
"learning_rate": 3.6304191072992376e-06,
"loss": 0.3265,
"step": 982
},
{
"epoch": 1.88832,
"grad_norm": 0.4985763430595398,
"learning_rate": 3.619662438280816e-06,
"loss": 0.3583,
"step": 983
},
{
"epoch": 1.89024,
"grad_norm": 0.48806115984916687,
"learning_rate": 3.6089126804177373e-06,
"loss": 0.3109,
"step": 984
},
{
"epoch": 1.89216,
"grad_norm": 0.5044103264808655,
"learning_rate": 3.5981698875325214e-06,
"loss": 0.3398,
"step": 985
},
{
"epoch": 1.89408,
"grad_norm": 0.5035799145698547,
"learning_rate": 3.5874341134128156e-06,
"loss": 0.3313,
"step": 986
},
{
"epoch": 1.896,
"grad_norm": 0.489520788192749,
"learning_rate": 3.5767054118111266e-06,
"loss": 0.3324,
"step": 987
},
{
"epoch": 1.89792,
"grad_norm": 0.4867776036262512,
"learning_rate": 3.5659838364445505e-06,
"loss": 0.3494,
"step": 988
},
{
"epoch": 1.89984,
"grad_norm": 0.4687146544456482,
"learning_rate": 3.555269440994496e-06,
"loss": 0.2827,
"step": 989
},
{
"epoch": 1.90176,
"grad_norm": 0.5176199078559875,
"learning_rate": 3.544562279106436e-06,
"loss": 0.3754,
"step": 990
},
{
"epoch": 1.90368,
"grad_norm": 0.572658896446228,
"learning_rate": 3.5338624043896154e-06,
"loss": 0.4373,
"step": 991
},
{
"epoch": 1.9056,
"grad_norm": 0.43591752648353577,
"learning_rate": 3.523169870416795e-06,
"loss": 0.2504,
"step": 992
},
{
"epoch": 1.9075199999999999,
"grad_norm": 0.4796431064605713,
"learning_rate": 3.5124847307239863e-06,
"loss": 0.3481,
"step": 993
},
{
"epoch": 1.90944,
"grad_norm": 0.47742682695388794,
"learning_rate": 3.501807038810174e-06,
"loss": 0.3302,
"step": 994
},
{
"epoch": 1.91136,
"grad_norm": 0.4486038088798523,
"learning_rate": 3.4911368481370535e-06,
"loss": 0.2773,
"step": 995
},
{
"epoch": 1.9132799999999999,
"grad_norm": 0.46793332695961,
"learning_rate": 3.480474212128766e-06,
"loss": 0.3518,
"step": 996
},
{
"epoch": 1.9152,
"grad_norm": 0.6371752023696899,
"learning_rate": 3.469819184171623e-06,
"loss": 0.4425,
"step": 997
},
{
"epoch": 1.9171200000000002,
"grad_norm": 0.5064558982849121,
"learning_rate": 3.459171817613847e-06,
"loss": 0.3198,
"step": 998
},
{
"epoch": 1.9190399999999999,
"grad_norm": 0.49334996938705444,
"learning_rate": 3.4485321657653e-06,
"loss": 0.309,
"step": 999
},
{
"epoch": 1.92096,
"grad_norm": 0.5114083290100098,
"learning_rate": 3.4379002818972122e-06,
"loss": 0.3355,
"step": 1000
},
{
"epoch": 1.9228800000000001,
"grad_norm": 0.544437050819397,
"learning_rate": 3.427276219241933e-06,
"loss": 0.3203,
"step": 1001
},
{
"epoch": 1.9247999999999998,
"grad_norm": 0.5670840740203857,
"learning_rate": 3.416660030992639e-06,
"loss": 0.3691,
"step": 1002
},
{
"epoch": 1.92672,
"grad_norm": 0.4540363848209381,
"learning_rate": 3.406051770303087e-06,
"loss": 0.2837,
"step": 1003
},
{
"epoch": 1.9286400000000001,
"grad_norm": 0.4597378373146057,
"learning_rate": 3.3954514902873427e-06,
"loss": 0.2854,
"step": 1004
},
{
"epoch": 1.93056,
"grad_norm": 0.5119041800498962,
"learning_rate": 3.3848592440195118e-06,
"loss": 0.347,
"step": 1005
},
{
"epoch": 1.93248,
"grad_norm": 0.49075472354888916,
"learning_rate": 3.3742750845334748e-06,
"loss": 0.3388,
"step": 1006
},
{
"epoch": 1.9344000000000001,
"grad_norm": 0.5604220628738403,
"learning_rate": 3.3636990648226258e-06,
"loss": 0.3174,
"step": 1007
},
{
"epoch": 1.93632,
"grad_norm": 0.4765825569629669,
"learning_rate": 3.3531312378396026e-06,
"loss": 0.3044,
"step": 1008
},
{
"epoch": 1.93824,
"grad_norm": 0.5442520380020142,
"learning_rate": 3.342571656496022e-06,
"loss": 0.3966,
"step": 1009
},
{
"epoch": 1.94016,
"grad_norm": 0.47910410165786743,
"learning_rate": 3.3320203736622185e-06,
"loss": 0.3549,
"step": 1010
},
{
"epoch": 1.94208,
"grad_norm": 0.5413928031921387,
"learning_rate": 3.3214774421669777e-06,
"loss": 0.4148,
"step": 1011
},
{
"epoch": 1.944,
"grad_norm": 0.4700442850589752,
"learning_rate": 3.310942914797265e-06,
"loss": 0.3227,
"step": 1012
},
{
"epoch": 1.94592,
"grad_norm": 0.5131711959838867,
"learning_rate": 3.3004168442979755e-06,
"loss": 0.3553,
"step": 1013
},
{
"epoch": 1.94784,
"grad_norm": 0.5190303325653076,
"learning_rate": 3.289899283371657e-06,
"loss": 0.341,
"step": 1014
},
{
"epoch": 1.94976,
"grad_norm": 0.4377288818359375,
"learning_rate": 3.2793902846782534e-06,
"loss": 0.2779,
"step": 1015
},
{
"epoch": 1.95168,
"grad_norm": 0.4982631802558899,
"learning_rate": 3.2688899008348386e-06,
"loss": 0.2852,
"step": 1016
},
{
"epoch": 1.9536,
"grad_norm": 0.5625821948051453,
"learning_rate": 3.2583981844153487e-06,
"loss": 0.3678,
"step": 1017
},
{
"epoch": 1.95552,
"grad_norm": 0.49450066685676575,
"learning_rate": 3.2479151879503324e-06,
"loss": 0.3506,
"step": 1018
},
{
"epoch": 1.95744,
"grad_norm": 0.5380319356918335,
"learning_rate": 3.2374409639266695e-06,
"loss": 0.3755,
"step": 1019
},
{
"epoch": 1.95936,
"grad_norm": 0.4946126937866211,
"learning_rate": 3.226975564787322e-06,
"loss": 0.341,
"step": 1020
},
{
"epoch": 1.96128,
"grad_norm": 0.5429108142852783,
"learning_rate": 3.2165190429310674e-06,
"loss": 0.394,
"step": 1021
},
{
"epoch": 1.9632,
"grad_norm": 0.49507689476013184,
"learning_rate": 3.206071450712235e-06,
"loss": 0.3074,
"step": 1022
},
{
"epoch": 1.96512,
"grad_norm": 0.5511486530303955,
"learning_rate": 3.1956328404404403e-06,
"loss": 0.347,
"step": 1023
},
{
"epoch": 1.96704,
"grad_norm": 0.48406052589416504,
"learning_rate": 3.1852032643803377e-06,
"loss": 0.3073,
"step": 1024
},
{
"epoch": 1.96896,
"grad_norm": 0.4810790419578552,
"learning_rate": 3.174782774751338e-06,
"loss": 0.3815,
"step": 1025
},
{
"epoch": 1.97088,
"grad_norm": 0.4630287289619446,
"learning_rate": 3.1643714237273628e-06,
"loss": 0.3431,
"step": 1026
},
{
"epoch": 1.9727999999999999,
"grad_norm": 0.5034884214401245,
"learning_rate": 3.1539692634365788e-06,
"loss": 0.3384,
"step": 1027
},
{
"epoch": 1.97472,
"grad_norm": 0.44290706515312195,
"learning_rate": 3.143576345961132e-06,
"loss": 0.2566,
"step": 1028
},
{
"epoch": 1.97664,
"grad_norm": 0.5320993661880493,
"learning_rate": 3.1331927233368954e-06,
"loss": 0.3659,
"step": 1029
},
{
"epoch": 1.9785599999999999,
"grad_norm": 0.5062898397445679,
"learning_rate": 3.1228184475532015e-06,
"loss": 0.3647,
"step": 1030
},
{
"epoch": 1.98048,
"grad_norm": 0.45649176836013794,
"learning_rate": 3.112453570552583e-06,
"loss": 0.3003,
"step": 1031
},
{
"epoch": 1.9824000000000002,
"grad_norm": 0.4806617498397827,
"learning_rate": 3.1020981442305187e-06,
"loss": 0.3465,
"step": 1032
},
{
"epoch": 1.9843199999999999,
"grad_norm": 0.46349433064460754,
"learning_rate": 3.091752220435166e-06,
"loss": 0.307,
"step": 1033
},
{
"epoch": 1.98624,
"grad_norm": 0.5566308498382568,
"learning_rate": 3.0814158509671015e-06,
"loss": 0.3884,
"step": 1034
},
{
"epoch": 1.9881600000000001,
"grad_norm": 0.586917519569397,
"learning_rate": 3.0710890875790745e-06,
"loss": 0.3533,
"step": 1035
},
{
"epoch": 1.9900799999999998,
"grad_norm": 0.509601354598999,
"learning_rate": 3.0607719819757264e-06,
"loss": 0.3109,
"step": 1036
},
{
"epoch": 1.992,
"grad_norm": 0.4961627721786499,
"learning_rate": 3.0504645858133507e-06,
"loss": 0.3448,
"step": 1037
},
{
"epoch": 1.9939200000000001,
"grad_norm": 0.4839821755886078,
"learning_rate": 3.040166950699626e-06,
"loss": 0.3702,
"step": 1038
},
{
"epoch": 1.9958399999999998,
"grad_norm": 0.47689089179039,
"learning_rate": 3.029879128193356e-06,
"loss": 0.2771,
"step": 1039
},
{
"epoch": 1.99776,
"grad_norm": 0.5058227181434631,
"learning_rate": 3.019601169804216e-06,
"loss": 0.4009,
"step": 1040
},
{
"epoch": 2.00064,
"grad_norm": 0.9545174241065979,
"learning_rate": 3.0093331269924954e-06,
"loss": 0.4388,
"step": 1041
},
{
"epoch": 2.00256,
"grad_norm": 0.4813426434993744,
"learning_rate": 2.9990750511688323e-06,
"loss": 0.2635,
"step": 1042
},
{
"epoch": 2.00448,
"grad_norm": 0.5681573748588562,
"learning_rate": 2.988826993693967e-06,
"loss": 0.2996,
"step": 1043
},
{
"epoch": 2.0064,
"grad_norm": 0.4733174443244934,
"learning_rate": 2.978589005878476e-06,
"loss": 0.2865,
"step": 1044
},
{
"epoch": 2.00832,
"grad_norm": 0.45871591567993164,
"learning_rate": 2.968361138982517e-06,
"loss": 0.2768,
"step": 1045
},
{
"epoch": 2.01024,
"grad_norm": 0.5240322351455688,
"learning_rate": 2.9581434442155798e-06,
"loss": 0.332,
"step": 1046
},
{
"epoch": 2.01216,
"grad_norm": 0.5047451853752136,
"learning_rate": 2.947935972736217e-06,
"loss": 0.301,
"step": 1047
},
{
"epoch": 2.01408,
"grad_norm": 0.4940253496170044,
"learning_rate": 2.937738775651798e-06,
"loss": 0.2849,
"step": 1048
},
{
"epoch": 2.016,
"grad_norm": 0.45726191997528076,
"learning_rate": 2.9275519040182503e-06,
"loss": 0.2045,
"step": 1049
},
{
"epoch": 2.01792,
"grad_norm": 0.4858790934085846,
"learning_rate": 2.917375408839803e-06,
"loss": 0.2748,
"step": 1050
},
{
"epoch": 2.01984,
"grad_norm": 0.5420165657997131,
"learning_rate": 2.9072093410687268e-06,
"loss": 0.3559,
"step": 1051
},
{
"epoch": 2.02176,
"grad_norm": 0.4875539541244507,
"learning_rate": 2.8970537516050935e-06,
"loss": 0.2996,
"step": 1052
},
{
"epoch": 2.02368,
"grad_norm": 0.4699147641658783,
"learning_rate": 2.886908691296504e-06,
"loss": 0.2564,
"step": 1053
},
{
"epoch": 2.0256,
"grad_norm": 0.4872596859931946,
"learning_rate": 2.876774210937843e-06,
"loss": 0.2911,
"step": 1054
},
{
"epoch": 2.02752,
"grad_norm": 0.5015615820884705,
"learning_rate": 2.866650361271023e-06,
"loss": 0.2849,
"step": 1055
},
{
"epoch": 2.02944,
"grad_norm": 0.5034334659576416,
"learning_rate": 2.8565371929847286e-06,
"loss": 0.3168,
"step": 1056
},
{
"epoch": 2.03136,
"grad_norm": 0.5125169157981873,
"learning_rate": 2.84643475671417e-06,
"loss": 0.2785,
"step": 1057
},
{
"epoch": 2.03328,
"grad_norm": 0.5271072387695312,
"learning_rate": 2.836343103040819e-06,
"loss": 0.3336,
"step": 1058
},
{
"epoch": 2.0352,
"grad_norm": 0.5400678515434265,
"learning_rate": 2.8262622824921593e-06,
"loss": 0.2702,
"step": 1059
},
{
"epoch": 2.03712,
"grad_norm": 0.5352171063423157,
"learning_rate": 2.816192345541437e-06,
"loss": 0.2651,
"step": 1060
},
{
"epoch": 2.03904,
"grad_norm": 0.5003254413604736,
"learning_rate": 2.8061333426074054e-06,
"loss": 0.2777,
"step": 1061
},
{
"epoch": 2.04096,
"grad_norm": 0.5269042253494263,
"learning_rate": 2.7960853240540703e-06,
"loss": 0.3002,
"step": 1062
},
{
"epoch": 2.04288,
"grad_norm": 0.5170962810516357,
"learning_rate": 2.7860483401904483e-06,
"loss": 0.3205,
"step": 1063
},
{
"epoch": 2.0448,
"grad_norm": 0.5289940237998962,
"learning_rate": 2.776022441270295e-06,
"loss": 0.2825,
"step": 1064
},
{
"epoch": 2.04672,
"grad_norm": 0.5449745655059814,
"learning_rate": 2.766007677491871e-06,
"loss": 0.27,
"step": 1065
},
{
"epoch": 2.04864,
"grad_norm": 0.48384344577789307,
"learning_rate": 2.7560040989976894e-06,
"loss": 0.2519,
"step": 1066
},
{
"epoch": 2.05056,
"grad_norm": 0.5239966511726379,
"learning_rate": 2.7460117558742532e-06,
"loss": 0.2817,
"step": 1067
},
{
"epoch": 2.05248,
"grad_norm": 0.5425927042961121,
"learning_rate": 2.736030698151815e-06,
"loss": 0.3113,
"step": 1068
},
{
"epoch": 2.0544,
"grad_norm": 0.5354337096214294,
"learning_rate": 2.72606097580412e-06,
"loss": 0.3104,
"step": 1069
},
{
"epoch": 2.05632,
"grad_norm": 0.5009585022926331,
"learning_rate": 2.7161026387481636e-06,
"loss": 0.2682,
"step": 1070
},
{
"epoch": 2.05824,
"grad_norm": 0.5295016169548035,
"learning_rate": 2.7061557368439294e-06,
"loss": 0.3398,
"step": 1071
},
{
"epoch": 2.06016,
"grad_norm": 0.49321863055229187,
"learning_rate": 2.6962203198941587e-06,
"loss": 0.2547,
"step": 1072
},
{
"epoch": 2.06208,
"grad_norm": 0.5190016627311707,
"learning_rate": 2.686296437644074e-06,
"loss": 0.2593,
"step": 1073
},
{
"epoch": 2.064,
"grad_norm": 0.47988367080688477,
"learning_rate": 2.6763841397811576e-06,
"loss": 0.2548,
"step": 1074
},
{
"epoch": 2.06592,
"grad_norm": 0.5053234696388245,
"learning_rate": 2.666483475934885e-06,
"loss": 0.2779,
"step": 1075
},
{
"epoch": 2.06784,
"grad_norm": 0.5281071066856384,
"learning_rate": 2.656594495676482e-06,
"loss": 0.3047,
"step": 1076
},
{
"epoch": 2.06976,
"grad_norm": 0.4990590810775757,
"learning_rate": 2.6467172485186775e-06,
"loss": 0.263,
"step": 1077
},
{
"epoch": 2.07168,
"grad_norm": 0.49443089962005615,
"learning_rate": 2.636851783915454e-06,
"loss": 0.2966,
"step": 1078
},
{
"epoch": 2.0736,
"grad_norm": 0.46977147459983826,
"learning_rate": 2.626998151261798e-06,
"loss": 0.2769,
"step": 1079
},
{
"epoch": 2.07552,
"grad_norm": 0.5732733011245728,
"learning_rate": 2.6171563998934605e-06,
"loss": 0.4097,
"step": 1080
},
{
"epoch": 2.07744,
"grad_norm": 0.44739052653312683,
"learning_rate": 2.607326579086701e-06,
"loss": 0.2686,
"step": 1081
},
{
"epoch": 2.07936,
"grad_norm": 0.5070497393608093,
"learning_rate": 2.5975087380580445e-06,
"loss": 0.2711,
"step": 1082
},
{
"epoch": 2.08128,
"grad_norm": 0.5222105383872986,
"learning_rate": 2.587702925964034e-06,
"loss": 0.3293,
"step": 1083
},
{
"epoch": 2.0832,
"grad_norm": 0.5033939480781555,
"learning_rate": 2.577909191900988e-06,
"loss": 0.2487,
"step": 1084
},
{
"epoch": 2.08512,
"grad_norm": 0.4947723150253296,
"learning_rate": 2.5681275849047482e-06,
"loss": 0.2513,
"step": 1085
},
{
"epoch": 2.08704,
"grad_norm": 0.5033813714981079,
"learning_rate": 2.5583581539504464e-06,
"loss": 0.291,
"step": 1086
},
{
"epoch": 2.08896,
"grad_norm": 0.5660333633422852,
"learning_rate": 2.5486009479522355e-06,
"loss": 0.311,
"step": 1087
},
{
"epoch": 2.09088,
"grad_norm": 0.4899430274963379,
"learning_rate": 2.5388560157630765e-06,
"loss": 0.2578,
"step": 1088
},
{
"epoch": 2.0928,
"grad_norm": 0.5390803217887878,
"learning_rate": 2.5291234061744655e-06,
"loss": 0.2865,
"step": 1089
},
{
"epoch": 2.09472,
"grad_norm": 0.4956015944480896,
"learning_rate": 2.519403167916207e-06,
"loss": 0.2577,
"step": 1090
},
{
"epoch": 2.09664,
"grad_norm": 0.5125566720962524,
"learning_rate": 2.50969534965616e-06,
"loss": 0.3064,
"step": 1091
},
{
"epoch": 2.09856,
"grad_norm": 0.6436797976493835,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.3994,
"step": 1092
},
{
"epoch": 2.10048,
"grad_norm": 0.48929932713508606,
"learning_rate": 2.490317167490976e-06,
"loss": 0.3051,
"step": 1093
},
{
"epoch": 2.1024,
"grad_norm": 0.49640828371047974,
"learning_rate": 2.480646900609664e-06,
"loss": 0.2609,
"step": 1094
},
{
"epoch": 2.10432,
"grad_norm": 0.5548200011253357,
"learning_rate": 2.4709892477737263e-06,
"loss": 0.3303,
"step": 1095
},
{
"epoch": 2.10624,
"grad_norm": 0.5048955082893372,
"learning_rate": 2.4613442573376625e-06,
"loss": 0.2656,
"step": 1096
},
{
"epoch": 2.10816,
"grad_norm": 0.5204638242721558,
"learning_rate": 2.4517119775925824e-06,
"loss": 0.3097,
"step": 1097
},
{
"epoch": 2.11008,
"grad_norm": 0.4764806032180786,
"learning_rate": 2.4420924567659508e-06,
"loss": 0.2538,
"step": 1098
},
{
"epoch": 2.112,
"grad_norm": 0.4691280424594879,
"learning_rate": 2.4324857430213504e-06,
"loss": 0.2214,
"step": 1099
},
{
"epoch": 2.11392,
"grad_norm": 0.5575925707817078,
"learning_rate": 2.422891884458241e-06,
"loss": 0.3502,
"step": 1100
},
{
"epoch": 2.11584,
"grad_norm": 0.5013216733932495,
"learning_rate": 2.4133109291117156e-06,
"loss": 0.2679,
"step": 1101
},
{
"epoch": 2.11776,
"grad_norm": 0.5373671650886536,
"learning_rate": 2.4037429249522702e-06,
"loss": 0.3096,
"step": 1102
},
{
"epoch": 2.11968,
"grad_norm": 0.5258444547653198,
"learning_rate": 2.394187919885548e-06,
"loss": 0.301,
"step": 1103
},
{
"epoch": 2.1216,
"grad_norm": 0.5288597345352173,
"learning_rate": 2.384645961752113e-06,
"loss": 0.3019,
"step": 1104
},
{
"epoch": 2.12352,
"grad_norm": 0.47741061449050903,
"learning_rate": 2.3751170983272e-06,
"loss": 0.2944,
"step": 1105
},
{
"epoch": 2.12544,
"grad_norm": 0.5065288543701172,
"learning_rate": 2.3656013773204843e-06,
"loss": 0.2824,
"step": 1106
},
{
"epoch": 2.12736,
"grad_norm": 0.5686082243919373,
"learning_rate": 2.3560988463758366e-06,
"loss": 0.3638,
"step": 1107
},
{
"epoch": 2.12928,
"grad_norm": 0.5141527652740479,
"learning_rate": 2.346609553071093e-06,
"loss": 0.3119,
"step": 1108
},
{
"epoch": 2.1312,
"grad_norm": 0.4824559688568115,
"learning_rate": 2.3371335449178006e-06,
"loss": 0.255,
"step": 1109
},
{
"epoch": 2.13312,
"grad_norm": 0.5781142711639404,
"learning_rate": 2.3276708693609947e-06,
"loss": 0.3121,
"step": 1110
},
{
"epoch": 2.13504,
"grad_norm": 0.5009545087814331,
"learning_rate": 2.3182215737789593e-06,
"loss": 0.3135,
"step": 1111
},
{
"epoch": 2.13696,
"grad_norm": 0.5075796842575073,
"learning_rate": 2.308785705482982e-06,
"loss": 0.2823,
"step": 1112
},
{
"epoch": 2.13888,
"grad_norm": 0.49607834219932556,
"learning_rate": 2.2993633117171243e-06,
"loss": 0.3009,
"step": 1113
},
{
"epoch": 2.1408,
"grad_norm": 0.5013031363487244,
"learning_rate": 2.289954439657981e-06,
"loss": 0.3318,
"step": 1114
},
{
"epoch": 2.14272,
"grad_norm": 0.472540020942688,
"learning_rate": 2.2805591364144446e-06,
"loss": 0.2629,
"step": 1115
},
{
"epoch": 2.14464,
"grad_norm": 0.5187729597091675,
"learning_rate": 2.2711774490274767e-06,
"loss": 0.3054,
"step": 1116
},
{
"epoch": 2.14656,
"grad_norm": 0.5210239887237549,
"learning_rate": 2.2618094244698614e-06,
"loss": 0.2922,
"step": 1117
},
{
"epoch": 2.14848,
"grad_norm": 0.5011726021766663,
"learning_rate": 2.2524551096459703e-06,
"loss": 0.2313,
"step": 1118
},
{
"epoch": 2.1504,
"grad_norm": 0.5252903699874878,
"learning_rate": 2.243114551391542e-06,
"loss": 0.2797,
"step": 1119
},
{
"epoch": 2.15232,
"grad_norm": 0.48239031434059143,
"learning_rate": 2.2337877964734324e-06,
"loss": 0.2728,
"step": 1120
},
{
"epoch": 2.15424,
"grad_norm": 0.5412247180938721,
"learning_rate": 2.224474891589386e-06,
"loss": 0.2748,
"step": 1121
},
{
"epoch": 2.15616,
"grad_norm": 0.5259332656860352,
"learning_rate": 2.2151758833678044e-06,
"loss": 0.2783,
"step": 1122
},
{
"epoch": 2.15808,
"grad_norm": 0.5343291163444519,
"learning_rate": 2.205890818367508e-06,
"loss": 0.3185,
"step": 1123
},
{
"epoch": 2.16,
"grad_norm": 0.5024482607841492,
"learning_rate": 2.1966197430775056e-06,
"loss": 0.2804,
"step": 1124
},
{
"epoch": 2.16192,
"grad_norm": 0.5297809839248657,
"learning_rate": 2.187362703916766e-06,
"loss": 0.2699,
"step": 1125
},
{
"epoch": 2.16384,
"grad_norm": 0.5424187183380127,
"learning_rate": 2.178119747233976e-06,
"loss": 0.2749,
"step": 1126
},
{
"epoch": 2.16576,
"grad_norm": 0.5112246870994568,
"learning_rate": 2.168890919307315e-06,
"loss": 0.3258,
"step": 1127
},
{
"epoch": 2.16768,
"grad_norm": 0.47564437985420227,
"learning_rate": 2.159676266344222e-06,
"loss": 0.2573,
"step": 1128
},
{
"epoch": 2.1696,
"grad_norm": 0.5513551831245422,
"learning_rate": 2.1504758344811615e-06,
"loss": 0.308,
"step": 1129
},
{
"epoch": 2.17152,
"grad_norm": 0.5051606893539429,
"learning_rate": 2.141289669783401e-06,
"loss": 0.2809,
"step": 1130
},
{
"epoch": 2.17344,
"grad_norm": 0.5100437998771667,
"learning_rate": 2.132117818244771e-06,
"loss": 0.289,
"step": 1131
},
{
"epoch": 2.17536,
"grad_norm": 0.5086724162101746,
"learning_rate": 2.122960325787432e-06,
"loss": 0.2888,
"step": 1132
},
{
"epoch": 2.17728,
"grad_norm": 0.5521082282066345,
"learning_rate": 2.1138172382616612e-06,
"loss": 0.3649,
"step": 1133
},
{
"epoch": 2.1792,
"grad_norm": 0.48624226450920105,
"learning_rate": 2.104688601445606e-06,
"loss": 0.2731,
"step": 1134
},
{
"epoch": 2.18112,
"grad_norm": 0.47339239716529846,
"learning_rate": 2.0955744610450616e-06,
"loss": 0.2618,
"step": 1135
},
{
"epoch": 2.18304,
"grad_norm": 0.519081175327301,
"learning_rate": 2.086474862693244e-06,
"loss": 0.3187,
"step": 1136
},
{
"epoch": 2.1849600000000002,
"grad_norm": 0.5010805130004883,
"learning_rate": 2.077389851950557e-06,
"loss": 0.3175,
"step": 1137
},
{
"epoch": 2.18688,
"grad_norm": 0.5163206458091736,
"learning_rate": 2.068319474304365e-06,
"loss": 0.2749,
"step": 1138
},
{
"epoch": 2.1888,
"grad_norm": 0.5012432336807251,
"learning_rate": 2.059263775168773e-06,
"loss": 0.3081,
"step": 1139
},
{
"epoch": 2.19072,
"grad_norm": 0.49186256527900696,
"learning_rate": 2.050222799884387e-06,
"loss": 0.2454,
"step": 1140
},
{
"epoch": 2.19264,
"grad_norm": 0.5336266756057739,
"learning_rate": 2.0411965937180907e-06,
"loss": 0.3034,
"step": 1141
},
{
"epoch": 2.19456,
"grad_norm": 0.4975786507129669,
"learning_rate": 2.0321852018628278e-06,
"loss": 0.283,
"step": 1142
},
{
"epoch": 2.19648,
"grad_norm": 0.5473085641860962,
"learning_rate": 2.0231886694373653e-06,
"loss": 0.3406,
"step": 1143
},
{
"epoch": 2.1984,
"grad_norm": 0.49892741441726685,
"learning_rate": 2.0142070414860704e-06,
"loss": 0.2602,
"step": 1144
},
{
"epoch": 2.20032,
"grad_norm": 0.4957484304904938,
"learning_rate": 2.005240362978686e-06,
"loss": 0.2424,
"step": 1145
},
{
"epoch": 2.20224,
"grad_norm": 0.518088161945343,
"learning_rate": 1.996288678810105e-06,
"loss": 0.2879,
"step": 1146
},
{
"epoch": 2.20416,
"grad_norm": 0.5527055263519287,
"learning_rate": 1.98735203380015e-06,
"loss": 0.3131,
"step": 1147
},
{
"epoch": 2.20608,
"grad_norm": 0.4892177879810333,
"learning_rate": 1.9784304726933384e-06,
"loss": 0.2607,
"step": 1148
},
{
"epoch": 2.208,
"grad_norm": 0.48440712690353394,
"learning_rate": 1.9695240401586687e-06,
"loss": 0.2674,
"step": 1149
},
{
"epoch": 2.20992,
"grad_norm": 0.4704214334487915,
"learning_rate": 1.9606327807893905e-06,
"loss": 0.2672,
"step": 1150
},
{
"epoch": 2.21184,
"grad_norm": 0.5575061440467834,
"learning_rate": 1.9517567391027853e-06,
"loss": 0.4047,
"step": 1151
},
{
"epoch": 2.21376,
"grad_norm": 0.5074655413627625,
"learning_rate": 1.942895959539939e-06,
"loss": 0.275,
"step": 1152
},
{
"epoch": 2.21568,
"grad_norm": 0.5127617120742798,
"learning_rate": 1.934050486465529e-06,
"loss": 0.263,
"step": 1153
},
{
"epoch": 2.2176,
"grad_norm": 0.5422980785369873,
"learning_rate": 1.9252203641675854e-06,
"loss": 0.2913,
"step": 1154
},
{
"epoch": 2.21952,
"grad_norm": 0.5218319296836853,
"learning_rate": 1.9164056368572847e-06,
"loss": 0.2891,
"step": 1155
},
{
"epoch": 2.22144,
"grad_norm": 0.5250471830368042,
"learning_rate": 1.9076063486687256e-06,
"loss": 0.33,
"step": 1156
},
{
"epoch": 2.22336,
"grad_norm": 0.48195943236351013,
"learning_rate": 1.8988225436587005e-06,
"loss": 0.2687,
"step": 1157
},
{
"epoch": 2.22528,
"grad_norm": 0.5046993494033813,
"learning_rate": 1.8900542658064807e-06,
"loss": 0.2582,
"step": 1158
},
{
"epoch": 2.2272,
"grad_norm": 0.5106871724128723,
"learning_rate": 1.8813015590135963e-06,
"loss": 0.284,
"step": 1159
},
{
"epoch": 2.22912,
"grad_norm": 0.5000110864639282,
"learning_rate": 1.8725644671036125e-06,
"loss": 0.2894,
"step": 1160
},
{
"epoch": 2.23104,
"grad_norm": 0.5613438487052917,
"learning_rate": 1.8638430338219199e-06,
"loss": 0.3655,
"step": 1161
},
{
"epoch": 2.23296,
"grad_norm": 0.5122923851013184,
"learning_rate": 1.8551373028355013e-06,
"loss": 0.316,
"step": 1162
},
{
"epoch": 2.23488,
"grad_norm": 0.5923593640327454,
"learning_rate": 1.846447317732723e-06,
"loss": 0.3216,
"step": 1163
},
{
"epoch": 2.2368,
"grad_norm": 0.4581090211868286,
"learning_rate": 1.8377731220231144e-06,
"loss": 0.2345,
"step": 1164
},
{
"epoch": 2.23872,
"grad_norm": 0.5233500003814697,
"learning_rate": 1.8291147591371482e-06,
"loss": 0.3002,
"step": 1165
},
{
"epoch": 2.24064,
"grad_norm": 0.5185627341270447,
"learning_rate": 1.8204722724260266e-06,
"loss": 0.2879,
"step": 1166
},
{
"epoch": 2.24256,
"grad_norm": 0.5060010552406311,
"learning_rate": 1.8118457051614591e-06,
"loss": 0.2928,
"step": 1167
},
{
"epoch": 2.24448,
"grad_norm": 0.48561593890190125,
"learning_rate": 1.803235100535452e-06,
"loss": 0.2776,
"step": 1168
},
{
"epoch": 2.2464,
"grad_norm": 0.5239033699035645,
"learning_rate": 1.7946405016600843e-06,
"loss": 0.3407,
"step": 1169
},
{
"epoch": 2.24832,
"grad_norm": 0.5480225086212158,
"learning_rate": 1.7860619515673034e-06,
"loss": 0.2727,
"step": 1170
},
{
"epoch": 2.25024,
"grad_norm": 0.5186541080474854,
"learning_rate": 1.7774994932086976e-06,
"loss": 0.2804,
"step": 1171
},
{
"epoch": 2.25216,
"grad_norm": 0.49505048990249634,
"learning_rate": 1.7689531694552863e-06,
"loss": 0.2944,
"step": 1172
},
{
"epoch": 2.25408,
"grad_norm": 0.5338854193687439,
"learning_rate": 1.7604230230973068e-06,
"loss": 0.306,
"step": 1173
},
{
"epoch": 2.2560000000000002,
"grad_norm": 0.4577998220920563,
"learning_rate": 1.7519090968439966e-06,
"loss": 0.2572,
"step": 1174
},
{
"epoch": 2.25792,
"grad_norm": 0.5280143618583679,
"learning_rate": 1.7434114333233853e-06,
"loss": 0.3028,
"step": 1175
},
{
"epoch": 2.25984,
"grad_norm": 0.5369488596916199,
"learning_rate": 1.7349300750820758e-06,
"loss": 0.2762,
"step": 1176
},
{
"epoch": 2.2617599999999998,
"grad_norm": 0.5128684043884277,
"learning_rate": 1.7264650645850256e-06,
"loss": 0.2836,
"step": 1177
},
{
"epoch": 2.26368,
"grad_norm": 0.502712607383728,
"learning_rate": 1.7180164442153529e-06,
"loss": 0.2297,
"step": 1178
},
{
"epoch": 2.2656,
"grad_norm": 0.5342593193054199,
"learning_rate": 1.709584256274106e-06,
"loss": 0.3069,
"step": 1179
},
{
"epoch": 2.26752,
"grad_norm": 0.540978193283081,
"learning_rate": 1.7011685429800596e-06,
"loss": 0.3167,
"step": 1180
},
{
"epoch": 2.26944,
"grad_norm": 0.48301181197166443,
"learning_rate": 1.6927693464695022e-06,
"loss": 0.2595,
"step": 1181
},
{
"epoch": 2.27136,
"grad_norm": 0.5143320560455322,
"learning_rate": 1.6843867087960252e-06,
"loss": 0.3076,
"step": 1182
},
{
"epoch": 2.27328,
"grad_norm": 0.48465707898139954,
"learning_rate": 1.6760206719303107e-06,
"loss": 0.2961,
"step": 1183
},
{
"epoch": 2.2752,
"grad_norm": 0.4967717230319977,
"learning_rate": 1.6676712777599275e-06,
"loss": 0.2758,
"step": 1184
},
{
"epoch": 2.27712,
"grad_norm": 0.5588000416755676,
"learning_rate": 1.6593385680891139e-06,
"loss": 0.3264,
"step": 1185
},
{
"epoch": 2.27904,
"grad_norm": 0.5057026147842407,
"learning_rate": 1.6510225846385668e-06,
"loss": 0.2672,
"step": 1186
},
{
"epoch": 2.28096,
"grad_norm": 0.48821648955345154,
"learning_rate": 1.6427233690452455e-06,
"loss": 0.2934,
"step": 1187
},
{
"epoch": 2.28288,
"grad_norm": 0.46941202878952026,
"learning_rate": 1.6344409628621482e-06,
"loss": 0.2354,
"step": 1188
},
{
"epoch": 2.2848,
"grad_norm": 0.5071481466293335,
"learning_rate": 1.6261754075581187e-06,
"loss": 0.2571,
"step": 1189
},
{
"epoch": 2.28672,
"grad_norm": 0.5312147736549377,
"learning_rate": 1.6179267445176206e-06,
"loss": 0.2754,
"step": 1190
},
{
"epoch": 2.28864,
"grad_norm": 0.5292414426803589,
"learning_rate": 1.6096950150405454e-06,
"loss": 0.2847,
"step": 1191
},
{
"epoch": 2.29056,
"grad_norm": 0.46359482407569885,
"learning_rate": 1.6014802603420044e-06,
"loss": 0.2203,
"step": 1192
},
{
"epoch": 2.29248,
"grad_norm": 0.49030157923698425,
"learning_rate": 1.593282521552113e-06,
"loss": 0.2612,
"step": 1193
},
{
"epoch": 2.2944,
"grad_norm": 0.4976269602775574,
"learning_rate": 1.5851018397157918e-06,
"loss": 0.2722,
"step": 1194
},
{
"epoch": 2.29632,
"grad_norm": 0.5036548376083374,
"learning_rate": 1.5769382557925612e-06,
"loss": 0.2623,
"step": 1195
},
{
"epoch": 2.29824,
"grad_norm": 0.5187435746192932,
"learning_rate": 1.5687918106563326e-06,
"loss": 0.2762,
"step": 1196
},
{
"epoch": 2.30016,
"grad_norm": 0.5432221293449402,
"learning_rate": 1.5606625450952062e-06,
"loss": 0.3863,
"step": 1197
},
{
"epoch": 2.30208,
"grad_norm": 0.4813677668571472,
"learning_rate": 1.5525504998112717e-06,
"loss": 0.2837,
"step": 1198
},
{
"epoch": 2.304,
"grad_norm": 0.5069709420204163,
"learning_rate": 1.5444557154203892e-06,
"loss": 0.271,
"step": 1199
},
{
"epoch": 2.30592,
"grad_norm": 0.5508760213851929,
"learning_rate": 1.5363782324520033e-06,
"loss": 0.3548,
"step": 1200
},
{
"epoch": 2.30784,
"grad_norm": 0.4824974834918976,
"learning_rate": 1.5283180913489326e-06,
"loss": 0.2822,
"step": 1201
},
{
"epoch": 2.30976,
"grad_norm": 0.5263679027557373,
"learning_rate": 1.520275332467166e-06,
"loss": 0.275,
"step": 1202
},
{
"epoch": 2.31168,
"grad_norm": 0.49948692321777344,
"learning_rate": 1.5122499960756604e-06,
"loss": 0.2794,
"step": 1203
},
{
"epoch": 2.3136,
"grad_norm": 0.548800528049469,
"learning_rate": 1.504242122356143e-06,
"loss": 0.3468,
"step": 1204
},
{
"epoch": 2.3155200000000002,
"grad_norm": 0.6562162637710571,
"learning_rate": 1.4962517514029069e-06,
"loss": 0.4115,
"step": 1205
},
{
"epoch": 2.31744,
"grad_norm": 0.5661089420318604,
"learning_rate": 1.4882789232226124e-06,
"loss": 0.3043,
"step": 1206
},
{
"epoch": 2.31936,
"grad_norm": 0.5237375497817993,
"learning_rate": 1.4803236777340856e-06,
"loss": 0.282,
"step": 1207
},
{
"epoch": 2.32128,
"grad_norm": 0.5258256196975708,
"learning_rate": 1.4723860547681163e-06,
"loss": 0.3147,
"step": 1208
},
{
"epoch": 2.3232,
"grad_norm": 0.5062211155891418,
"learning_rate": 1.4644660940672628e-06,
"loss": 0.2561,
"step": 1209
},
{
"epoch": 2.32512,
"grad_norm": 0.5358169674873352,
"learning_rate": 1.4565638352856504e-06,
"loss": 0.2906,
"step": 1210
},
{
"epoch": 2.32704,
"grad_norm": 0.476710706949234,
"learning_rate": 1.4486793179887738e-06,
"loss": 0.2662,
"step": 1211
},
{
"epoch": 2.32896,
"grad_norm": 0.539723813533783,
"learning_rate": 1.4408125816532981e-06,
"loss": 0.3095,
"step": 1212
},
{
"epoch": 2.33088,
"grad_norm": 0.4852389693260193,
"learning_rate": 1.4329636656668617e-06,
"loss": 0.2799,
"step": 1213
},
{
"epoch": 2.3327999999999998,
"grad_norm": 0.5488572716712952,
"learning_rate": 1.4251326093278773e-06,
"loss": 0.3195,
"step": 1214
},
{
"epoch": 2.33472,
"grad_norm": 0.5351449251174927,
"learning_rate": 1.4173194518453415e-06,
"loss": 0.3173,
"step": 1215
},
{
"epoch": 2.33664,
"grad_norm": 0.4634568691253662,
"learning_rate": 1.4095242323386305e-06,
"loss": 0.2497,
"step": 1216
},
{
"epoch": 2.33856,
"grad_norm": 0.4708751440048218,
"learning_rate": 1.4017469898373077e-06,
"loss": 0.2535,
"step": 1217
},
{
"epoch": 2.34048,
"grad_norm": 0.5249441862106323,
"learning_rate": 1.3939877632809279e-06,
"loss": 0.2698,
"step": 1218
},
{
"epoch": 2.3424,
"grad_norm": 0.5238059163093567,
"learning_rate": 1.3862465915188427e-06,
"loss": 0.312,
"step": 1219
},
{
"epoch": 2.34432,
"grad_norm": 0.48658016324043274,
"learning_rate": 1.3785235133100088e-06,
"loss": 0.2763,
"step": 1220
},
{
"epoch": 2.34624,
"grad_norm": 0.49284628033638,
"learning_rate": 1.3708185673227896e-06,
"loss": 0.2413,
"step": 1221
},
{
"epoch": 2.34816,
"grad_norm": 0.5301333665847778,
"learning_rate": 1.3631317921347564e-06,
"loss": 0.2995,
"step": 1222
},
{
"epoch": 2.35008,
"grad_norm": 0.5061814188957214,
"learning_rate": 1.3554632262325129e-06,
"loss": 0.2836,
"step": 1223
},
{
"epoch": 2.352,
"grad_norm": 0.4877197742462158,
"learning_rate": 1.347812908011485e-06,
"loss": 0.2801,
"step": 1224
},
{
"epoch": 2.35392,
"grad_norm": 0.4739716649055481,
"learning_rate": 1.340180875775735e-06,
"loss": 0.2558,
"step": 1225
},
{
"epoch": 2.35584,
"grad_norm": 0.47851693630218506,
"learning_rate": 1.3325671677377727e-06,
"loss": 0.2635,
"step": 1226
},
{
"epoch": 2.35776,
"grad_norm": 0.5251500606536865,
"learning_rate": 1.3249718220183583e-06,
"loss": 0.3403,
"step": 1227
},
{
"epoch": 2.35968,
"grad_norm": 0.4584653079509735,
"learning_rate": 1.3173948766463146e-06,
"loss": 0.2354,
"step": 1228
},
{
"epoch": 2.3616,
"grad_norm": 0.4903619885444641,
"learning_rate": 1.309836369558341e-06,
"loss": 0.2497,
"step": 1229
},
{
"epoch": 2.36352,
"grad_norm": 0.547564685344696,
"learning_rate": 1.3022963385988153e-06,
"loss": 0.3405,
"step": 1230
},
{
"epoch": 2.36544,
"grad_norm": 0.5136017799377441,
"learning_rate": 1.2947748215196038e-06,
"loss": 0.2827,
"step": 1231
},
{
"epoch": 2.36736,
"grad_norm": 0.5517025589942932,
"learning_rate": 1.2872718559798852e-06,
"loss": 0.3192,
"step": 1232
},
{
"epoch": 2.36928,
"grad_norm": 0.4797460734844208,
"learning_rate": 1.2797874795459464e-06,
"loss": 0.2211,
"step": 1233
},
{
"epoch": 2.3712,
"grad_norm": 0.49123114347457886,
"learning_rate": 1.2723217296910078e-06,
"loss": 0.2737,
"step": 1234
},
{
"epoch": 2.37312,
"grad_norm": 0.5138117074966431,
"learning_rate": 1.264874643795021e-06,
"loss": 0.2977,
"step": 1235
},
{
"epoch": 2.37504,
"grad_norm": 0.480654239654541,
"learning_rate": 1.257446259144494e-06,
"loss": 0.2462,
"step": 1236
},
{
"epoch": 2.37696,
"grad_norm": 0.5112031698226929,
"learning_rate": 1.2500366129323039e-06,
"loss": 0.2923,
"step": 1237
},
{
"epoch": 2.37888,
"grad_norm": 0.4766487181186676,
"learning_rate": 1.2426457422575e-06,
"loss": 0.2326,
"step": 1238
},
{
"epoch": 2.3808,
"grad_norm": 0.5113689303398132,
"learning_rate": 1.2352736841251306e-06,
"loss": 0.308,
"step": 1239
},
{
"epoch": 2.38272,
"grad_norm": 0.5277336239814758,
"learning_rate": 1.2279204754460494e-06,
"loss": 0.3104,
"step": 1240
},
{
"epoch": 2.38464,
"grad_norm": 0.5538479685783386,
"learning_rate": 1.2205861530367342e-06,
"loss": 0.292,
"step": 1241
},
{
"epoch": 2.3865600000000002,
"grad_norm": 0.49769070744514465,
"learning_rate": 1.2132707536191008e-06,
"loss": 0.232,
"step": 1242
},
{
"epoch": 2.38848,
"grad_norm": 0.5033958554267883,
"learning_rate": 1.2059743138203256e-06,
"loss": 0.2677,
"step": 1243
},
{
"epoch": 2.3904,
"grad_norm": 0.5443189144134521,
"learning_rate": 1.1986968701726492e-06,
"loss": 0.312,
"step": 1244
},
{
"epoch": 2.39232,
"grad_norm": 0.5479001402854919,
"learning_rate": 1.1914384591132045e-06,
"loss": 0.3074,
"step": 1245
},
{
"epoch": 2.39424,
"grad_norm": 0.5138202905654907,
"learning_rate": 1.184199116983834e-06,
"loss": 0.291,
"step": 1246
},
{
"epoch": 2.39616,
"grad_norm": 0.4831412434577942,
"learning_rate": 1.1769788800309001e-06,
"loss": 0.2913,
"step": 1247
},
{
"epoch": 2.39808,
"grad_norm": 0.5141727924346924,
"learning_rate": 1.1697777844051105e-06,
"loss": 0.3379,
"step": 1248
},
{
"epoch": 2.4,
"grad_norm": 0.5140635371208191,
"learning_rate": 1.1625958661613345e-06,
"loss": 0.2928,
"step": 1249
},
{
"epoch": 2.40192,
"grad_norm": 0.5522688627243042,
"learning_rate": 1.1554331612584218e-06,
"loss": 0.3514,
"step": 1250
},
{
"epoch": 2.4038399999999998,
"grad_norm": 0.49213460087776184,
"learning_rate": 1.1482897055590275e-06,
"loss": 0.2611,
"step": 1251
},
{
"epoch": 2.40576,
"grad_norm": 0.5185586810112,
"learning_rate": 1.141165534829425e-06,
"loss": 0.2871,
"step": 1252
},
{
"epoch": 2.40768,
"grad_norm": 0.5209627747535706,
"learning_rate": 1.134060684739331e-06,
"loss": 0.2827,
"step": 1253
},
{
"epoch": 2.4096,
"grad_norm": 0.535041093826294,
"learning_rate": 1.1269751908617277e-06,
"loss": 0.3143,
"step": 1254
},
{
"epoch": 2.41152,
"grad_norm": 0.5376631021499634,
"learning_rate": 1.119909088672682e-06,
"loss": 0.2936,
"step": 1255
},
{
"epoch": 2.41344,
"grad_norm": 0.5358872413635254,
"learning_rate": 1.1128624135511712e-06,
"loss": 0.2658,
"step": 1256
},
{
"epoch": 2.41536,
"grad_norm": 0.5552674531936646,
"learning_rate": 1.105835200778902e-06,
"loss": 0.3018,
"step": 1257
},
{
"epoch": 2.41728,
"grad_norm": 0.5257450938224792,
"learning_rate": 1.0988274855401377e-06,
"loss": 0.2974,
"step": 1258
},
{
"epoch": 2.4192,
"grad_norm": 0.5063558220863342,
"learning_rate": 1.091839302921518e-06,
"loss": 0.2799,
"step": 1259
},
{
"epoch": 2.42112,
"grad_norm": 0.5337387323379517,
"learning_rate": 1.0848706879118893e-06,
"loss": 0.2926,
"step": 1260
},
{
"epoch": 2.42304,
"grad_norm": 0.4982994496822357,
"learning_rate": 1.0779216754021215e-06,
"loss": 0.2738,
"step": 1261
},
{
"epoch": 2.42496,
"grad_norm": 0.49282708764076233,
"learning_rate": 1.070992300184941e-06,
"loss": 0.2623,
"step": 1262
},
{
"epoch": 2.42688,
"grad_norm": 0.48084521293640137,
"learning_rate": 1.0640825969547498e-06,
"loss": 0.2811,
"step": 1263
},
{
"epoch": 2.4288,
"grad_norm": 0.5474672913551331,
"learning_rate": 1.057192600307456e-06,
"loss": 0.3692,
"step": 1264
},
{
"epoch": 2.43072,
"grad_norm": 0.4835873544216156,
"learning_rate": 1.0503223447403033e-06,
"loss": 0.247,
"step": 1265
},
{
"epoch": 2.43264,
"grad_norm": 0.5266591310501099,
"learning_rate": 1.0434718646516917e-06,
"loss": 0.3301,
"step": 1266
},
{
"epoch": 2.43456,
"grad_norm": 0.5426017642021179,
"learning_rate": 1.0366411943410033e-06,
"loss": 0.3254,
"step": 1267
},
{
"epoch": 2.43648,
"grad_norm": 0.49996837973594666,
"learning_rate": 1.0298303680084448e-06,
"loss": 0.2392,
"step": 1268
},
{
"epoch": 2.4384,
"grad_norm": 0.5440322160720825,
"learning_rate": 1.0230394197548605e-06,
"loss": 0.2541,
"step": 1269
},
{
"epoch": 2.44032,
"grad_norm": 0.5115669369697571,
"learning_rate": 1.0162683835815706e-06,
"loss": 0.28,
"step": 1270
},
{
"epoch": 2.44224,
"grad_norm": 0.5454736351966858,
"learning_rate": 1.009517293390197e-06,
"loss": 0.2788,
"step": 1271
},
{
"epoch": 2.44416,
"grad_norm": 0.4783473312854767,
"learning_rate": 1.0027861829824953e-06,
"loss": 0.2315,
"step": 1272
},
{
"epoch": 2.44608,
"grad_norm": 0.5672537684440613,
"learning_rate": 9.960750860601842e-07,
"loss": 0.3374,
"step": 1273
},
{
"epoch": 2.448,
"grad_norm": 0.5574640035629272,
"learning_rate": 9.893840362247809e-07,
"loss": 0.2754,
"step": 1274
},
{
"epoch": 2.44992,
"grad_norm": 0.4954107403755188,
"learning_rate": 9.82713066977427e-07,
"loss": 0.2415,
"step": 1275
},
{
"epoch": 2.45184,
"grad_norm": 0.5778104662895203,
"learning_rate": 9.760622117187234e-07,
"loss": 0.3354,
"step": 1276
},
{
"epoch": 2.45376,
"grad_norm": 0.5369322299957275,
"learning_rate": 9.694315037485635e-07,
"loss": 0.3129,
"step": 1277
},
{
"epoch": 2.45568,
"grad_norm": 0.5030196309089661,
"learning_rate": 9.628209762659658e-07,
"loss": 0.2824,
"step": 1278
},
{
"epoch": 2.4576000000000002,
"grad_norm": 0.4862186312675476,
"learning_rate": 9.562306623689111e-07,
"loss": 0.2411,
"step": 1279
},
{
"epoch": 2.45952,
"grad_norm": 0.5337828397750854,
"learning_rate": 9.496605950541676e-07,
"loss": 0.2719,
"step": 1280
},
{
"epoch": 2.46144,
"grad_norm": 0.475655198097229,
"learning_rate": 9.431108072171346e-07,
"loss": 0.2166,
"step": 1281
},
{
"epoch": 2.4633599999999998,
"grad_norm": 0.5359756946563721,
"learning_rate": 9.365813316516787e-07,
"loss": 0.2718,
"step": 1282
},
{
"epoch": 2.46528,
"grad_norm": 0.512302815914154,
"learning_rate": 9.300722010499608e-07,
"loss": 0.2605,
"step": 1283
},
{
"epoch": 2.4672,
"grad_norm": 0.46624261140823364,
"learning_rate": 9.235834480022788e-07,
"loss": 0.2125,
"step": 1284
},
{
"epoch": 2.46912,
"grad_norm": 0.5139369368553162,
"learning_rate": 9.17115104996903e-07,
"loss": 0.2827,
"step": 1285
},
{
"epoch": 2.47104,
"grad_norm": 0.5012480616569519,
"learning_rate": 9.10667204419915e-07,
"loss": 0.2683,
"step": 1286
},
{
"epoch": 2.47296,
"grad_norm": 0.476575642824173,
"learning_rate": 9.042397785550405e-07,
"loss": 0.2793,
"step": 1287
},
{
"epoch": 2.47488,
"grad_norm": 0.49789562821388245,
"learning_rate": 8.978328595834984e-07,
"loss": 0.2677,
"step": 1288
},
{
"epoch": 2.4768,
"grad_norm": 0.5309513807296753,
"learning_rate": 8.91446479583823e-07,
"loss": 0.2874,
"step": 1289
},
{
"epoch": 2.47872,
"grad_norm": 0.5617191791534424,
"learning_rate": 8.850806705317183e-07,
"loss": 0.3385,
"step": 1290
},
{
"epoch": 2.48064,
"grad_norm": 0.5113781690597534,
"learning_rate": 8.787354642998936e-07,
"loss": 0.2252,
"step": 1291
},
{
"epoch": 2.48256,
"grad_norm": 0.5043871402740479,
"learning_rate": 8.724108926579e-07,
"loss": 0.2483,
"step": 1292
},
{
"epoch": 2.48448,
"grad_norm": 0.5385339856147766,
"learning_rate": 8.661069872719746e-07,
"loss": 0.2993,
"step": 1293
},
{
"epoch": 2.4864,
"grad_norm": 0.5282170176506042,
"learning_rate": 8.598237797048825e-07,
"loss": 0.303,
"step": 1294
},
{
"epoch": 2.48832,
"grad_norm": 0.5155388712882996,
"learning_rate": 8.535613014157556e-07,
"loss": 0.2687,
"step": 1295
},
{
"epoch": 2.49024,
"grad_norm": 0.5517279505729675,
"learning_rate": 8.473195837599419e-07,
"loss": 0.3153,
"step": 1296
},
{
"epoch": 2.49216,
"grad_norm": 0.5516957640647888,
"learning_rate": 8.410986579888381e-07,
"loss": 0.3159,
"step": 1297
},
{
"epoch": 2.49408,
"grad_norm": 0.5034341216087341,
"learning_rate": 8.348985552497424e-07,
"loss": 0.2983,
"step": 1298
},
{
"epoch": 2.496,
"grad_norm": 14.006168365478516,
"learning_rate": 8.287193065856936e-07,
"loss": 0.6145,
"step": 1299
},
{
"epoch": 2.49792,
"grad_norm": 0.506659209728241,
"learning_rate": 8.225609429353187e-07,
"loss": 0.301,
"step": 1300
},
{
"epoch": 2.49984,
"grad_norm": 0.5285565853118896,
"learning_rate": 8.164234951326727e-07,
"loss": 0.2985,
"step": 1301
},
{
"epoch": 2.50176,
"grad_norm": 0.5034978985786438,
"learning_rate": 8.103069939070945e-07,
"loss": 0.2781,
"step": 1302
},
{
"epoch": 2.50368,
"grad_norm": 0.5425746440887451,
"learning_rate": 8.042114698830394e-07,
"loss": 0.3129,
"step": 1303
},
{
"epoch": 2.5056000000000003,
"grad_norm": 0.5301453471183777,
"learning_rate": 7.981369535799354e-07,
"loss": 0.2879,
"step": 1304
},
{
"epoch": 2.50752,
"grad_norm": 0.5089478492736816,
"learning_rate": 7.920834754120305e-07,
"loss": 0.2931,
"step": 1305
},
{
"epoch": 2.50944,
"grad_norm": 0.4932142198085785,
"learning_rate": 7.860510656882342e-07,
"loss": 0.2951,
"step": 1306
},
{
"epoch": 2.51136,
"grad_norm": 0.5140165686607361,
"learning_rate": 7.800397546119709e-07,
"loss": 0.2996,
"step": 1307
},
{
"epoch": 2.51328,
"grad_norm": 0.4979294538497925,
"learning_rate": 7.740495722810271e-07,
"loss": 0.2703,
"step": 1308
},
{
"epoch": 2.5152,
"grad_norm": 0.5070496201515198,
"learning_rate": 7.680805486873977e-07,
"loss": 0.2872,
"step": 1309
},
{
"epoch": 2.5171200000000002,
"grad_norm": 0.4756541848182678,
"learning_rate": 7.621327137171447e-07,
"loss": 0.2583,
"step": 1310
},
{
"epoch": 2.51904,
"grad_norm": 0.5040149688720703,
"learning_rate": 7.562060971502383e-07,
"loss": 0.2762,
"step": 1311
},
{
"epoch": 2.52096,
"grad_norm": 0.48779425024986267,
"learning_rate": 7.50300728660407e-07,
"loss": 0.286,
"step": 1312
},
{
"epoch": 2.52288,
"grad_norm": 0.5339841842651367,
"learning_rate": 7.444166378150014e-07,
"loss": 0.3246,
"step": 1313
},
{
"epoch": 2.5248,
"grad_norm": 0.5055726766586304,
"learning_rate": 7.385538540748327e-07,
"loss": 0.2946,
"step": 1314
},
{
"epoch": 2.52672,
"grad_norm": 0.5034390091896057,
"learning_rate": 7.327124067940311e-07,
"loss": 0.2669,
"step": 1315
},
{
"epoch": 2.52864,
"grad_norm": 0.5152619481086731,
"learning_rate": 7.26892325219899e-07,
"loss": 0.2914,
"step": 1316
},
{
"epoch": 2.53056,
"grad_norm": 0.5021916627883911,
"learning_rate": 7.210936384927631e-07,
"loss": 0.2699,
"step": 1317
},
{
"epoch": 2.53248,
"grad_norm": 0.5259951949119568,
"learning_rate": 7.153163756458287e-07,
"loss": 0.3096,
"step": 1318
},
{
"epoch": 2.5343999999999998,
"grad_norm": 0.5437947511672974,
"learning_rate": 7.09560565605037e-07,
"loss": 0.3501,
"step": 1319
},
{
"epoch": 2.53632,
"grad_norm": 0.5202975273132324,
"learning_rate": 7.03826237188916e-07,
"loss": 0.2785,
"step": 1320
},
{
"epoch": 2.53824,
"grad_norm": 0.4808063507080078,
"learning_rate": 6.981134191084388e-07,
"loss": 0.2399,
"step": 1321
},
{
"epoch": 2.54016,
"grad_norm": 0.5385096669197083,
"learning_rate": 6.924221399668785e-07,
"loss": 0.2983,
"step": 1322
},
{
"epoch": 2.54208,
"grad_norm": 0.4778071343898773,
"learning_rate": 6.867524282596655e-07,
"loss": 0.2601,
"step": 1323
},
{
"epoch": 2.544,
"grad_norm": 0.4848290681838989,
"learning_rate": 6.811043123742494e-07,
"loss": 0.2602,
"step": 1324
},
{
"epoch": 2.5459199999999997,
"grad_norm": 0.5069729685783386,
"learning_rate": 6.754778205899465e-07,
"loss": 0.2978,
"step": 1325
},
{
"epoch": 2.54784,
"grad_norm": 0.5477489829063416,
"learning_rate": 6.698729810778065e-07,
"loss": 0.324,
"step": 1326
},
{
"epoch": 2.54976,
"grad_norm": 0.46972790360450745,
"learning_rate": 6.642898219004723e-07,
"loss": 0.2633,
"step": 1327
},
{
"epoch": 2.55168,
"grad_norm": 0.5651832818984985,
"learning_rate": 6.587283710120324e-07,
"loss": 0.2985,
"step": 1328
},
{
"epoch": 2.5536,
"grad_norm": 0.541215717792511,
"learning_rate": 6.531886562578859e-07,
"loss": 0.3101,
"step": 1329
},
{
"epoch": 2.55552,
"grad_norm": 0.5124194622039795,
"learning_rate": 6.47670705374604e-07,
"loss": 0.2532,
"step": 1330
},
{
"epoch": 2.55744,
"grad_norm": 0.5186104774475098,
"learning_rate": 6.421745459897871e-07,
"loss": 0.3051,
"step": 1331
},
{
"epoch": 2.55936,
"grad_norm": 0.5144189596176147,
"learning_rate": 6.367002056219285e-07,
"loss": 0.2968,
"step": 1332
},
{
"epoch": 2.56128,
"grad_norm": 0.49063295125961304,
"learning_rate": 6.312477116802807e-07,
"loss": 0.2767,
"step": 1333
},
{
"epoch": 2.5632,
"grad_norm": 0.5266640782356262,
"learning_rate": 6.258170914647077e-07,
"loss": 0.2826,
"step": 1334
},
{
"epoch": 2.56512,
"grad_norm": 0.5236597061157227,
"learning_rate": 6.204083721655607e-07,
"loss": 0.3224,
"step": 1335
},
{
"epoch": 2.56704,
"grad_norm": 0.5012643337249756,
"learning_rate": 6.150215808635334e-07,
"loss": 0.2776,
"step": 1336
},
{
"epoch": 2.56896,
"grad_norm": 0.49152541160583496,
"learning_rate": 6.096567445295298e-07,
"loss": 0.2826,
"step": 1337
},
{
"epoch": 2.57088,
"grad_norm": 0.4615619480609894,
"learning_rate": 6.043138900245277e-07,
"loss": 0.2232,
"step": 1338
},
{
"epoch": 2.5728,
"grad_norm": 0.491543173789978,
"learning_rate": 5.989930440994451e-07,
"loss": 0.2845,
"step": 1339
},
{
"epoch": 2.57472,
"grad_norm": 0.5158467888832092,
"learning_rate": 5.936942333950063e-07,
"loss": 0.2993,
"step": 1340
},
{
"epoch": 2.5766400000000003,
"grad_norm": 0.5210102796554565,
"learning_rate": 5.884174844416102e-07,
"loss": 0.2803,
"step": 1341
},
{
"epoch": 2.57856,
"grad_norm": 0.4547671675682068,
"learning_rate": 5.831628236591929e-07,
"loss": 0.2458,
"step": 1342
},
{
"epoch": 2.58048,
"grad_norm": 0.5124427676200867,
"learning_rate": 5.779302773570994e-07,
"loss": 0.2852,
"step": 1343
},
{
"epoch": 2.5824,
"grad_norm": 0.5720849633216858,
"learning_rate": 5.727198717339511e-07,
"loss": 0.3027,
"step": 1344
},
{
"epoch": 2.58432,
"grad_norm": 0.4653509259223938,
"learning_rate": 5.675316328775126e-07,
"loss": 0.2425,
"step": 1345
},
{
"epoch": 2.58624,
"grad_norm": 0.5525123476982117,
"learning_rate": 5.623655867645628e-07,
"loss": 0.355,
"step": 1346
},
{
"epoch": 2.5881600000000002,
"grad_norm": 0.5052363276481628,
"learning_rate": 5.572217592607687e-07,
"loss": 0.2744,
"step": 1347
},
{
"epoch": 2.59008,
"grad_norm": 0.5178247094154358,
"learning_rate": 5.521001761205441e-07,
"loss": 0.2609,
"step": 1348
},
{
"epoch": 2.592,
"grad_norm": 0.5181273818016052,
"learning_rate": 5.470008629869367e-07,
"loss": 0.2862,
"step": 1349
},
{
"epoch": 2.59392,
"grad_norm": 0.5403995513916016,
"learning_rate": 5.41923845391486e-07,
"loss": 0.2896,
"step": 1350
},
{
"epoch": 2.59584,
"grad_norm": 0.5276386141777039,
"learning_rate": 5.368691487541027e-07,
"loss": 0.3037,
"step": 1351
},
{
"epoch": 2.59776,
"grad_norm": 0.5122671127319336,
"learning_rate": 5.318367983829393e-07,
"loss": 0.2981,
"step": 1352
},
{
"epoch": 2.59968,
"grad_norm": 0.5177205801010132,
"learning_rate": 5.268268194742638e-07,
"loss": 0.2787,
"step": 1353
},
{
"epoch": 2.6016,
"grad_norm": 0.5190232396125793,
"learning_rate": 5.218392371123326e-07,
"loss": 0.322,
"step": 1354
},
{
"epoch": 2.60352,
"grad_norm": 0.4875623285770416,
"learning_rate": 5.168740762692681e-07,
"loss": 0.2344,
"step": 1355
},
{
"epoch": 2.6054399999999998,
"grad_norm": 0.5116649270057678,
"learning_rate": 5.119313618049309e-07,
"loss": 0.2516,
"step": 1356
},
{
"epoch": 2.60736,
"grad_norm": 0.5162714719772339,
"learning_rate": 5.070111184667908e-07,
"loss": 0.2877,
"step": 1357
},
{
"epoch": 2.60928,
"grad_norm": 0.49368512630462646,
"learning_rate": 5.021133708898146e-07,
"loss": 0.2753,
"step": 1358
},
{
"epoch": 2.6112,
"grad_norm": 0.5159112811088562,
"learning_rate": 4.972381435963331e-07,
"loss": 0.29,
"step": 1359
},
{
"epoch": 2.61312,
"grad_norm": 0.551774799823761,
"learning_rate": 4.9238546099592e-07,
"loss": 0.3648,
"step": 1360
},
{
"epoch": 2.61504,
"grad_norm": 0.516254186630249,
"learning_rate": 4.875553473852735e-07,
"loss": 0.247,
"step": 1361
},
{
"epoch": 2.6169599999999997,
"grad_norm": 0.4950300455093384,
"learning_rate": 4.827478269480895e-07,
"loss": 0.2632,
"step": 1362
},
{
"epoch": 2.61888,
"grad_norm": 0.4841112792491913,
"learning_rate": 4.779629237549438e-07,
"loss": 0.325,
"step": 1363
},
{
"epoch": 2.6208,
"grad_norm": 0.46128109097480774,
"learning_rate": 4.732006617631729e-07,
"loss": 0.2376,
"step": 1364
},
{
"epoch": 2.62272,
"grad_norm": 0.4796451926231384,
"learning_rate": 4.6846106481675035e-07,
"loss": 0.2601,
"step": 1365
},
{
"epoch": 2.62464,
"grad_norm": 0.525676429271698,
"learning_rate": 4.637441566461681e-07,
"loss": 0.3528,
"step": 1366
},
{
"epoch": 2.62656,
"grad_norm": 0.48877784609794617,
"learning_rate": 4.590499608683202e-07,
"loss": 0.2692,
"step": 1367
},
{
"epoch": 2.62848,
"grad_norm": 0.49372032284736633,
"learning_rate": 4.54378500986381e-07,
"loss": 0.2869,
"step": 1368
},
{
"epoch": 2.6304,
"grad_norm": 0.5529400706291199,
"learning_rate": 4.497298003896944e-07,
"loss": 0.3423,
"step": 1369
},
{
"epoch": 2.63232,
"grad_norm": 0.4859146177768707,
"learning_rate": 4.451038823536441e-07,
"loss": 0.2561,
"step": 1370
},
{
"epoch": 2.63424,
"grad_norm": 0.8827998042106628,
"learning_rate": 4.405007700395497e-07,
"loss": 0.3867,
"step": 1371
},
{
"epoch": 2.63616,
"grad_norm": 0.5259866118431091,
"learning_rate": 4.35920486494546e-07,
"loss": 0.2746,
"step": 1372
},
{
"epoch": 2.63808,
"grad_norm": 0.5051853656768799,
"learning_rate": 4.313630546514663e-07,
"loss": 0.2777,
"step": 1373
},
{
"epoch": 2.64,
"grad_norm": 0.504035472869873,
"learning_rate": 4.268284973287273e-07,
"loss": 0.2655,
"step": 1374
},
{
"epoch": 2.64192,
"grad_norm": 0.5248432755470276,
"learning_rate": 4.223168372302189e-07,
"loss": 0.2681,
"step": 1375
},
{
"epoch": 2.64384,
"grad_norm": 0.4911769926548004,
"learning_rate": 4.1782809694518533e-07,
"loss": 0.274,
"step": 1376
},
{
"epoch": 2.64576,
"grad_norm": 0.5364853143692017,
"learning_rate": 4.1336229894811454e-07,
"loss": 0.3223,
"step": 1377
},
{
"epoch": 2.6476800000000003,
"grad_norm": 0.4902110993862152,
"learning_rate": 4.089194655986306e-07,
"loss": 0.2572,
"step": 1378
},
{
"epoch": 2.6496,
"grad_norm": 0.5714824199676514,
"learning_rate": 4.044996191413686e-07,
"loss": 0.3832,
"step": 1379
},
{
"epoch": 2.65152,
"grad_norm": 0.5229995846748352,
"learning_rate": 4.001027817058789e-07,
"loss": 0.2897,
"step": 1380
},
{
"epoch": 2.65344,
"grad_norm": 0.5065412521362305,
"learning_rate": 3.957289753065052e-07,
"loss": 0.256,
"step": 1381
},
{
"epoch": 2.65536,
"grad_norm": 0.5410925149917603,
"learning_rate": 3.9137822184227845e-07,
"loss": 0.3415,
"step": 1382
},
{
"epoch": 2.65728,
"grad_norm": 0.4742574095726013,
"learning_rate": 3.870505430968069e-07,
"loss": 0.2727,
"step": 1383
},
{
"epoch": 2.6592000000000002,
"grad_norm": 0.4873232841491699,
"learning_rate": 3.8274596073816784e-07,
"loss": 0.2784,
"step": 1384
},
{
"epoch": 2.66112,
"grad_norm": 0.5234816074371338,
"learning_rate": 3.7846449631879664e-07,
"loss": 0.3175,
"step": 1385
},
{
"epoch": 2.66304,
"grad_norm": 0.5284846425056458,
"learning_rate": 3.742061712753825e-07,
"loss": 0.2732,
"step": 1386
},
{
"epoch": 2.6649599999999998,
"grad_norm": 0.5220972299575806,
"learning_rate": 3.699710069287571e-07,
"loss": 0.2872,
"step": 1387
},
{
"epoch": 2.66688,
"grad_norm": 0.5483995676040649,
"learning_rate": 3.657590244837911e-07,
"loss": 0.3075,
"step": 1388
},
{
"epoch": 2.6688,
"grad_norm": 0.5797990560531616,
"learning_rate": 3.615702450292857e-07,
"loss": 0.3204,
"step": 1389
},
{
"epoch": 2.67072,
"grad_norm": 0.5336170196533203,
"learning_rate": 3.5740468953786854e-07,
"loss": 0.286,
"step": 1390
},
{
"epoch": 2.67264,
"grad_norm": 0.5327572226524353,
"learning_rate": 3.5326237886588734e-07,
"loss": 0.2731,
"step": 1391
},
{
"epoch": 2.67456,
"grad_norm": 0.4613768458366394,
"learning_rate": 3.49143333753309e-07,
"loss": 0.2396,
"step": 1392
},
{
"epoch": 2.6764799999999997,
"grad_norm": 0.47034865617752075,
"learning_rate": 3.4504757482360817e-07,
"loss": 0.2523,
"step": 1393
},
{
"epoch": 2.6784,
"grad_norm": 0.5069217085838318,
"learning_rate": 3.4097512258367385e-07,
"loss": 0.2735,
"step": 1394
},
{
"epoch": 2.68032,
"grad_norm": 0.5629119277000427,
"learning_rate": 3.369259974236988e-07,
"loss": 0.3389,
"step": 1395
},
{
"epoch": 2.68224,
"grad_norm": 0.532002329826355,
"learning_rate": 3.3290021961708163e-07,
"loss": 0.2793,
"step": 1396
},
{
"epoch": 2.68416,
"grad_norm": 0.5260536670684814,
"learning_rate": 3.288978093203227e-07,
"loss": 0.2739,
"step": 1397
},
{
"epoch": 2.68608,
"grad_norm": 0.5093713402748108,
"learning_rate": 3.2491878657292643e-07,
"loss": 0.2666,
"step": 1398
},
{
"epoch": 2.6879999999999997,
"grad_norm": 0.47211265563964844,
"learning_rate": 3.209631712972966e-07,
"loss": 0.2458,
"step": 1399
},
{
"epoch": 2.68992,
"grad_norm": 0.5226900577545166,
"learning_rate": 3.1703098329864237e-07,
"loss": 0.3603,
"step": 1400
},
{
"epoch": 2.69184,
"grad_norm": 0.49710115790367126,
"learning_rate": 3.131222422648744e-07,
"loss": 0.2672,
"step": 1401
},
{
"epoch": 2.69376,
"grad_norm": 0.5201264023780823,
"learning_rate": 3.0923696776650414e-07,
"loss": 0.2813,
"step": 1402
},
{
"epoch": 2.69568,
"grad_norm": 0.545404314994812,
"learning_rate": 3.0537517925655567e-07,
"loss": 0.3076,
"step": 1403
},
{
"epoch": 2.6976,
"grad_norm": 0.4871445596218109,
"learning_rate": 3.015368960704584e-07,
"loss": 0.2849,
"step": 1404
},
{
"epoch": 2.69952,
"grad_norm": 0.4660834074020386,
"learning_rate": 2.9772213742595367e-07,
"loss": 0.2707,
"step": 1405
},
{
"epoch": 2.70144,
"grad_norm": 0.5052247643470764,
"learning_rate": 2.9393092242300026e-07,
"loss": 0.2666,
"step": 1406
},
{
"epoch": 2.70336,
"grad_norm": 0.43773582577705383,
"learning_rate": 2.901632700436757e-07,
"loss": 0.222,
"step": 1407
},
{
"epoch": 2.70528,
"grad_norm": 0.48343104124069214,
"learning_rate": 2.864191991520848e-07,
"loss": 0.2669,
"step": 1408
},
{
"epoch": 2.7072000000000003,
"grad_norm": 0.5126166343688965,
"learning_rate": 2.8269872849426114e-07,
"loss": 0.2844,
"step": 1409
},
{
"epoch": 2.70912,
"grad_norm": 0.5321773290634155,
"learning_rate": 2.790018766980773e-07,
"loss": 0.3051,
"step": 1410
},
{
"epoch": 2.71104,
"grad_norm": 0.5006430149078369,
"learning_rate": 2.7532866227314714e-07,
"loss": 0.3082,
"step": 1411
},
{
"epoch": 2.71296,
"grad_norm": 0.4988725483417511,
"learning_rate": 2.71679103610738e-07,
"loss": 0.2579,
"step": 1412
},
{
"epoch": 2.71488,
"grad_norm": 0.47717005014419556,
"learning_rate": 2.6805321898367323e-07,
"loss": 0.2742,
"step": 1413
},
{
"epoch": 2.7168,
"grad_norm": 0.5387145280838013,
"learning_rate": 2.64451026546248e-07,
"loss": 0.3105,
"step": 1414
},
{
"epoch": 2.7187200000000002,
"grad_norm": 0.5160573720932007,
"learning_rate": 2.6087254433412924e-07,
"loss": 0.2877,
"step": 1415
},
{
"epoch": 2.72064,
"grad_norm": 0.5060129165649414,
"learning_rate": 2.573177902642726e-07,
"loss": 0.3247,
"step": 1416
},
{
"epoch": 2.72256,
"grad_norm": 0.4652881622314453,
"learning_rate": 2.5378678213483057e-07,
"loss": 0.2171,
"step": 1417
},
{
"epoch": 2.72448,
"grad_norm": 0.49938613176345825,
"learning_rate": 2.502795376250622e-07,
"loss": 0.2512,
"step": 1418
},
{
"epoch": 2.7264,
"grad_norm": 0.5143187046051025,
"learning_rate": 2.467960742952463e-07,
"loss": 0.3139,
"step": 1419
},
{
"epoch": 2.72832,
"grad_norm": 0.5032271146774292,
"learning_rate": 2.4333640958659144e-07,
"loss": 0.2856,
"step": 1420
},
{
"epoch": 2.7302400000000002,
"grad_norm": 0.5430501103401184,
"learning_rate": 2.399005608211502e-07,
"loss": 0.343,
"step": 1421
},
{
"epoch": 2.73216,
"grad_norm": 0.5197240710258484,
"learning_rate": 2.3648854520173237e-07,
"loss": 0.246,
"step": 1422
},
{
"epoch": 2.73408,
"grad_norm": 0.5261397957801819,
"learning_rate": 2.3310037981182088e-07,
"loss": 0.2787,
"step": 1423
},
{
"epoch": 2.7359999999999998,
"grad_norm": 0.4812126159667969,
"learning_rate": 2.2973608161547755e-07,
"loss": 0.2383,
"step": 1424
},
{
"epoch": 2.73792,
"grad_norm": 0.5413344502449036,
"learning_rate": 2.2639566745727203e-07,
"loss": 0.3292,
"step": 1425
},
{
"epoch": 2.73984,
"grad_norm": 0.4908527731895447,
"learning_rate": 2.2307915406218517e-07,
"loss": 0.2619,
"step": 1426
},
{
"epoch": 2.74176,
"grad_norm": 0.4955504536628723,
"learning_rate": 2.1978655803553128e-07,
"loss": 0.2525,
"step": 1427
},
{
"epoch": 2.74368,
"grad_norm": 0.5250062346458435,
"learning_rate": 2.1651789586287442e-07,
"loss": 0.3198,
"step": 1428
},
{
"epoch": 2.7456,
"grad_norm": 0.4920820891857147,
"learning_rate": 2.1327318390994445e-07,
"loss": 0.2885,
"step": 1429
},
{
"epoch": 2.7475199999999997,
"grad_norm": 0.5108388066291809,
"learning_rate": 2.1005243842255552e-07,
"loss": 0.2678,
"step": 1430
},
{
"epoch": 2.74944,
"grad_norm": 0.48679402470588684,
"learning_rate": 2.068556755265272e-07,
"loss": 0.2777,
"step": 1431
},
{
"epoch": 2.75136,
"grad_norm": 0.49243977665901184,
"learning_rate": 2.0368291122759898e-07,
"loss": 0.2672,
"step": 1432
},
{
"epoch": 2.75328,
"grad_norm": 0.5199057459831238,
"learning_rate": 2.005341614113543e-07,
"loss": 0.3326,
"step": 1433
},
{
"epoch": 2.7552,
"grad_norm": 0.5049248933792114,
"learning_rate": 1.9740944184313882e-07,
"loss": 0.3185,
"step": 1434
},
{
"epoch": 2.75712,
"grad_norm": 0.49474677443504333,
"learning_rate": 1.9430876816798228e-07,
"loss": 0.2864,
"step": 1435
},
{
"epoch": 2.75904,
"grad_norm": 0.48984795808792114,
"learning_rate": 1.9123215591052014e-07,
"loss": 0.2284,
"step": 1436
},
{
"epoch": 2.76096,
"grad_norm": 0.49480631947517395,
"learning_rate": 1.88179620474917e-07,
"loss": 0.2776,
"step": 1437
},
{
"epoch": 2.76288,
"grad_norm": 0.5034934282302856,
"learning_rate": 1.8515117714478447e-07,
"loss": 0.2955,
"step": 1438
},
{
"epoch": 2.7648,
"grad_norm": 0.5219425559043884,
"learning_rate": 1.8214684108311286e-07,
"loss": 0.2997,
"step": 1439
},
{
"epoch": 2.76672,
"grad_norm": 0.5076531767845154,
"learning_rate": 1.7916662733218848e-07,
"loss": 0.3018,
"step": 1440
},
{
"epoch": 2.76864,
"grad_norm": 0.4444115161895752,
"learning_rate": 1.762105508135198e-07,
"loss": 0.2022,
"step": 1441
},
{
"epoch": 2.77056,
"grad_norm": 0.5638416409492493,
"learning_rate": 1.7327862632776638e-07,
"loss": 0.3218,
"step": 1442
},
{
"epoch": 2.77248,
"grad_norm": 0.50645911693573,
"learning_rate": 1.7037086855465902e-07,
"loss": 0.2894,
"step": 1443
},
{
"epoch": 2.7744,
"grad_norm": 0.5264792442321777,
"learning_rate": 1.6748729205293024e-07,
"loss": 0.282,
"step": 1444
},
{
"epoch": 2.77632,
"grad_norm": 0.5501585006713867,
"learning_rate": 1.646279112602417e-07,
"loss": 0.3495,
"step": 1445
},
{
"epoch": 2.7782400000000003,
"grad_norm": 0.529294490814209,
"learning_rate": 1.6179274049310966e-07,
"loss": 0.2785,
"step": 1446
},
{
"epoch": 2.78016,
"grad_norm": 0.5129810571670532,
"learning_rate": 1.5898179394683244e-07,
"loss": 0.2385,
"step": 1447
},
{
"epoch": 2.78208,
"grad_norm": 0.5153532028198242,
"learning_rate": 1.5619508569542363e-07,
"loss": 0.2575,
"step": 1448
},
{
"epoch": 2.784,
"grad_norm": 0.49844181537628174,
"learning_rate": 1.5343262969153781e-07,
"loss": 0.2531,
"step": 1449
},
{
"epoch": 2.78592,
"grad_norm": 0.5551387667655945,
"learning_rate": 1.5069443976640287e-07,
"loss": 0.3086,
"step": 1450
},
{
"epoch": 2.78784,
"grad_norm": 0.48104503750801086,
"learning_rate": 1.4798052962974874e-07,
"loss": 0.2433,
"step": 1451
},
{
"epoch": 2.7897600000000002,
"grad_norm": 0.5029877424240112,
"learning_rate": 1.4529091286973994e-07,
"loss": 0.2636,
"step": 1452
},
{
"epoch": 2.79168,
"grad_norm": 0.46708324551582336,
"learning_rate": 1.4262560295290884e-07,
"loss": 0.26,
"step": 1453
},
{
"epoch": 2.7936,
"grad_norm": 0.5143520832061768,
"learning_rate": 1.3998461322408563e-07,
"loss": 0.2669,
"step": 1454
},
{
"epoch": 2.79552,
"grad_norm": 0.45702338218688965,
"learning_rate": 1.3736795690633353e-07,
"loss": 0.21,
"step": 1455
},
{
"epoch": 2.79744,
"grad_norm": 0.5258322954177856,
"learning_rate": 1.3477564710088097e-07,
"loss": 0.3053,
"step": 1456
},
{
"epoch": 2.79936,
"grad_norm": 0.5339749455451965,
"learning_rate": 1.3220769678705724e-07,
"loss": 0.3053,
"step": 1457
},
{
"epoch": 2.80128,
"grad_norm": 0.4626672863960266,
"learning_rate": 1.2966411882222695e-07,
"loss": 0.2055,
"step": 1458
},
{
"epoch": 2.8032,
"grad_norm": 0.5238644480705261,
"learning_rate": 1.271449259417268e-07,
"loss": 0.3367,
"step": 1459
},
{
"epoch": 2.80512,
"grad_norm": 0.5266008377075195,
"learning_rate": 1.2465013075879884e-07,
"loss": 0.2504,
"step": 1460
},
{
"epoch": 2.8070399999999998,
"grad_norm": 0.556125283241272,
"learning_rate": 1.2217974576453072e-07,
"loss": 0.3333,
"step": 1461
},
{
"epoch": 2.80896,
"grad_norm": 0.49983927607536316,
"learning_rate": 1.1973378332779229e-07,
"loss": 0.2815,
"step": 1462
},
{
"epoch": 2.81088,
"grad_norm": 0.4906519055366516,
"learning_rate": 1.1731225569517113e-07,
"loss": 0.2363,
"step": 1463
},
{
"epoch": 2.8128,
"grad_norm": 0.4918469488620758,
"learning_rate": 1.1491517499091498e-07,
"loss": 0.2465,
"step": 1464
},
{
"epoch": 2.81472,
"grad_norm": 0.5118820667266846,
"learning_rate": 1.1254255321686836e-07,
"loss": 0.26,
"step": 1465
},
{
"epoch": 2.81664,
"grad_norm": 0.5007131695747375,
"learning_rate": 1.1019440225241317e-07,
"loss": 0.3018,
"step": 1466
},
{
"epoch": 2.8185599999999997,
"grad_norm": 0.49532267451286316,
"learning_rate": 1.078707338544105e-07,
"loss": 0.2797,
"step": 1467
},
{
"epoch": 2.82048,
"grad_norm": 0.4817069470882416,
"learning_rate": 1.055715596571405e-07,
"loss": 0.2722,
"step": 1468
},
{
"epoch": 2.8224,
"grad_norm": 0.4881128966808319,
"learning_rate": 1.0329689117224262e-07,
"loss": 0.2634,
"step": 1469
},
{
"epoch": 2.82432,
"grad_norm": 0.5298675298690796,
"learning_rate": 1.0104673978866164e-07,
"loss": 0.2736,
"step": 1470
},
{
"epoch": 2.82624,
"grad_norm": 0.5366477370262146,
"learning_rate": 9.882111677258777e-08,
"loss": 0.2894,
"step": 1471
},
{
"epoch": 2.82816,
"grad_norm": 0.4822460114955902,
"learning_rate": 9.662003326740166e-08,
"loss": 0.2611,
"step": 1472
},
{
"epoch": 2.83008,
"grad_norm": 0.5337690114974976,
"learning_rate": 9.444350029361671e-08,
"loss": 0.3096,
"step": 1473
},
{
"epoch": 2.832,
"grad_norm": 0.4977927803993225,
"learning_rate": 9.22915287488274e-08,
"loss": 0.2736,
"step": 1474
},
{
"epoch": 2.83392,
"grad_norm": 0.5260142087936401,
"learning_rate": 9.016412940765107e-08,
"loss": 0.3118,
"step": 1475
},
{
"epoch": 2.83584,
"grad_norm": 0.4731020927429199,
"learning_rate": 8.80613129216762e-08,
"loss": 0.2668,
"step": 1476
},
{
"epoch": 2.83776,
"grad_norm": 0.4878785014152527,
"learning_rate": 8.598308981940751e-08,
"loss": 0.2873,
"step": 1477
},
{
"epoch": 2.83968,
"grad_norm": 0.4620414674282074,
"learning_rate": 8.392947050621603e-08,
"loss": 0.2246,
"step": 1478
},
{
"epoch": 2.8416,
"grad_norm": 0.5118506550788879,
"learning_rate": 8.190046526428241e-08,
"loss": 0.3324,
"step": 1479
},
{
"epoch": 2.84352,
"grad_norm": 0.46563202142715454,
"learning_rate": 7.989608425254924e-08,
"loss": 0.2426,
"step": 1480
},
{
"epoch": 2.84544,
"grad_norm": 0.510045051574707,
"learning_rate": 7.791633750667105e-08,
"loss": 0.3047,
"step": 1481
},
{
"epoch": 2.84736,
"grad_norm": 0.4812230169773102,
"learning_rate": 7.59612349389599e-08,
"loss": 0.2377,
"step": 1482
},
{
"epoch": 2.8492800000000003,
"grad_norm": 0.5075802803039551,
"learning_rate": 7.403078633833716e-08,
"loss": 0.2649,
"step": 1483
},
{
"epoch": 2.8512,
"grad_norm": 0.5060356259346008,
"learning_rate": 7.212500137028789e-08,
"loss": 0.2953,
"step": 1484
},
{
"epoch": 2.85312,
"grad_norm": 0.4909372925758362,
"learning_rate": 7.024388957680705e-08,
"loss": 0.2578,
"step": 1485
},
{
"epoch": 2.85504,
"grad_norm": 0.48711639642715454,
"learning_rate": 6.838746037635735e-08,
"loss": 0.2729,
"step": 1486
},
{
"epoch": 2.85696,
"grad_norm": 0.4548548460006714,
"learning_rate": 6.655572306381696e-08,
"loss": 0.2415,
"step": 1487
},
{
"epoch": 2.85888,
"grad_norm": 0.5146133899688721,
"learning_rate": 6.474868681043578e-08,
"loss": 0.2602,
"step": 1488
},
{
"epoch": 2.8608000000000002,
"grad_norm": 0.5003006458282471,
"learning_rate": 6.29663606637898e-08,
"loss": 0.2683,
"step": 1489
},
{
"epoch": 2.86272,
"grad_norm": 0.5150914788246155,
"learning_rate": 6.120875354773459e-08,
"loss": 0.2973,
"step": 1490
},
{
"epoch": 2.86464,
"grad_norm": 0.49993202090263367,
"learning_rate": 5.947587426236079e-08,
"loss": 0.2668,
"step": 1491
},
{
"epoch": 2.8665599999999998,
"grad_norm": 0.4898831248283386,
"learning_rate": 5.776773148394976e-08,
"loss": 0.2855,
"step": 1492
},
{
"epoch": 2.86848,
"grad_norm": 0.5036088228225708,
"learning_rate": 5.608433376493194e-08,
"loss": 0.2714,
"step": 1493
},
{
"epoch": 2.8704,
"grad_norm": 0.5525497198104858,
"learning_rate": 5.4425689533841864e-08,
"loss": 0.269,
"step": 1494
},
{
"epoch": 2.87232,
"grad_norm": 0.5207503437995911,
"learning_rate": 5.279180709527765e-08,
"loss": 0.2956,
"step": 1495
},
{
"epoch": 2.87424,
"grad_norm": 0.4986329674720764,
"learning_rate": 5.1182694629857145e-08,
"loss": 0.2759,
"step": 1496
},
{
"epoch": 2.87616,
"grad_norm": 0.49119311571121216,
"learning_rate": 4.959836019417963e-08,
"loss": 0.2697,
"step": 1497
},
{
"epoch": 2.8780799999999997,
"grad_norm": 0.5001600980758667,
"learning_rate": 4.803881172078473e-08,
"loss": 0.2936,
"step": 1498
},
{
"epoch": 2.88,
"grad_norm": 0.47565126419067383,
"learning_rate": 4.650405701811078e-08,
"loss": 0.269,
"step": 1499
},
{
"epoch": 2.88192,
"grad_norm": 0.49541619420051575,
"learning_rate": 4.499410377045765e-08,
"loss": 0.2641,
"step": 1500
},
{
"epoch": 2.88384,
"grad_norm": 0.477107971906662,
"learning_rate": 4.350895953794898e-08,
"loss": 0.318,
"step": 1501
},
{
"epoch": 2.88576,
"grad_norm": 0.46682289242744446,
"learning_rate": 4.2048631756492206e-08,
"loss": 0.2875,
"step": 1502
},
{
"epoch": 2.88768,
"grad_norm": 0.5205910801887512,
"learning_rate": 4.0613127737741396e-08,
"loss": 0.2684,
"step": 1503
},
{
"epoch": 2.8895999999999997,
"grad_norm": 0.530179500579834,
"learning_rate": 3.9202454669063915e-08,
"loss": 0.3114,
"step": 1504
},
{
"epoch": 2.89152,
"grad_norm": 0.532010555267334,
"learning_rate": 3.781661961349992e-08,
"loss": 0.2945,
"step": 1505
},
{
"epoch": 2.89344,
"grad_norm": 0.5516195297241211,
"learning_rate": 3.645562950973014e-08,
"loss": 0.2969,
"step": 1506
},
{
"epoch": 2.89536,
"grad_norm": 0.5038628578186035,
"learning_rate": 3.5119491172039836e-08,
"loss": 0.2962,
"step": 1507
},
{
"epoch": 2.89728,
"grad_norm": 0.48654934763908386,
"learning_rate": 3.3808211290284886e-08,
"loss": 0.2868,
"step": 1508
},
{
"epoch": 2.8992,
"grad_norm": 0.5266822576522827,
"learning_rate": 3.252179642985909e-08,
"loss": 0.3144,
"step": 1509
},
{
"epoch": 2.90112,
"grad_norm": 0.4886174201965332,
"learning_rate": 3.1260253031660247e-08,
"loss": 0.2763,
"step": 1510
},
{
"epoch": 2.90304,
"grad_norm": 0.5198752880096436,
"learning_rate": 3.002358741205691e-08,
"loss": 0.2916,
"step": 1511
},
{
"epoch": 2.90496,
"grad_norm": 0.5090113282203674,
"learning_rate": 2.8811805762860578e-08,
"loss": 0.2705,
"step": 1512
},
{
"epoch": 2.90688,
"grad_norm": 0.5114381313323975,
"learning_rate": 2.762491415128965e-08,
"loss": 0.2943,
"step": 1513
},
{
"epoch": 2.9088000000000003,
"grad_norm": 0.4948650598526001,
"learning_rate": 2.6462918519944425e-08,
"loss": 0.2923,
"step": 1514
},
{
"epoch": 2.91072,
"grad_norm": 0.5100908279418945,
"learning_rate": 2.5325824686772138e-08,
"loss": 0.2735,
"step": 1515
},
{
"epoch": 2.91264,
"grad_norm": 0.5389769673347473,
"learning_rate": 2.4213638345040868e-08,
"loss": 0.3105,
"step": 1516
},
{
"epoch": 2.91456,
"grad_norm": 0.5490589737892151,
"learning_rate": 2.3126365063311228e-08,
"loss": 0.2912,
"step": 1517
},
{
"epoch": 2.91648,
"grad_norm": 0.5034134984016418,
"learning_rate": 2.206401028540639e-08,
"loss": 0.3156,
"step": 1518
},
{
"epoch": 2.9184,
"grad_norm": 0.5002502202987671,
"learning_rate": 2.1026579330387655e-08,
"loss": 0.2812,
"step": 1519
},
{
"epoch": 2.9203200000000002,
"grad_norm": 0.47155794501304626,
"learning_rate": 2.0014077392525035e-08,
"loss": 0.2399,
"step": 1520
},
{
"epoch": 2.92224,
"grad_norm": 0.5312910079956055,
"learning_rate": 1.9026509541272276e-08,
"loss": 0.3013,
"step": 1521
},
{
"epoch": 2.92416,
"grad_norm": 0.5147032737731934,
"learning_rate": 1.8063880721242986e-08,
"loss": 0.2988,
"step": 1522
},
{
"epoch": 2.92608,
"grad_norm": 0.47034305334091187,
"learning_rate": 1.712619575218344e-08,
"loss": 0.258,
"step": 1523
},
{
"epoch": 2.928,
"grad_norm": 0.5479640364646912,
"learning_rate": 1.6213459328950355e-08,
"loss": 0.3094,
"step": 1524
},
{
"epoch": 2.92992,
"grad_norm": 0.5018991827964783,
"learning_rate": 1.5325676021484826e-08,
"loss": 0.2191,
"step": 1525
},
{
"epoch": 2.9318400000000002,
"grad_norm": 0.49543485045433044,
"learning_rate": 1.4462850274794548e-08,
"loss": 0.2577,
"step": 1526
},
{
"epoch": 2.93376,
"grad_norm": 0.520496129989624,
"learning_rate": 1.3624986408924956e-08,
"loss": 0.2951,
"step": 1527
},
{
"epoch": 2.93568,
"grad_norm": 0.5302727818489075,
"learning_rate": 1.2812088618942009e-08,
"loss": 0.2911,
"step": 1528
},
{
"epoch": 2.9375999999999998,
"grad_norm": 0.5265939831733704,
"learning_rate": 1.2024160974911103e-08,
"loss": 0.2812,
"step": 1529
},
{
"epoch": 2.93952,
"grad_norm": 0.5374103784561157,
"learning_rate": 1.1261207421874309e-08,
"loss": 0.2886,
"step": 1530
},
{
"epoch": 2.94144,
"grad_norm": 0.541830837726593,
"learning_rate": 1.0523231779832055e-08,
"loss": 0.2786,
"step": 1531
},
{
"epoch": 2.94336,
"grad_norm": 0.4992273151874542,
"learning_rate": 9.810237743724805e-09,
"loss": 0.2866,
"step": 1532
},
{
"epoch": 2.94528,
"grad_norm": 0.502890408039093,
"learning_rate": 9.12222888341252e-09,
"loss": 0.2572,
"step": 1533
},
{
"epoch": 2.9472,
"grad_norm": 0.4967486262321472,
"learning_rate": 8.459208643659122e-09,
"loss": 0.2832,
"step": 1534
},
{
"epoch": 2.9491199999999997,
"grad_norm": 0.46821698546409607,
"learning_rate": 7.8211803441125e-09,
"loss": 0.2559,
"step": 1535
},
{
"epoch": 2.95104,
"grad_norm": 0.5248948335647583,
"learning_rate": 7.2081471792911914e-09,
"loss": 0.3153,
"step": 1536
},
{
"epoch": 2.95296,
"grad_norm": 0.4801546335220337,
"learning_rate": 6.6201122185649555e-09,
"loss": 0.2619,
"step": 1537
},
{
"epoch": 2.95488,
"grad_norm": 0.5282320380210876,
"learning_rate": 6.057078406142003e-09,
"loss": 0.3306,
"step": 1538
},
{
"epoch": 2.9568,
"grad_norm": 0.48265284299850464,
"learning_rate": 5.519048561053453e-09,
"loss": 0.2509,
"step": 1539
},
{
"epoch": 2.95872,
"grad_norm": 0.5104262232780457,
"learning_rate": 5.006025377138901e-09,
"loss": 0.266,
"step": 1540
},
{
"epoch": 2.96064,
"grad_norm": 0.4913363754749298,
"learning_rate": 4.518011423032542e-09,
"loss": 0.2688,
"step": 1541
},
{
"epoch": 2.96256,
"grad_norm": 0.5669986605644226,
"learning_rate": 4.055009142152066e-09,
"loss": 0.2879,
"step": 1542
},
{
"epoch": 2.96448,
"grad_norm": 0.5109623074531555,
"learning_rate": 3.6170208526836724e-09,
"loss": 0.2997,
"step": 1543
},
{
"epoch": 2.9664,
"grad_norm": 0.5076644420623779,
"learning_rate": 3.204048747573185e-09,
"loss": 0.3001,
"step": 1544
},
{
"epoch": 2.96832,
"grad_norm": 0.5129315257072449,
"learning_rate": 2.816094894513843e-09,
"loss": 0.265,
"step": 1545
},
{
"epoch": 2.97024,
"grad_norm": 0.4935891032218933,
"learning_rate": 2.4531612359363077e-09,
"loss": 0.2547,
"step": 1546
},
{
"epoch": 2.97216,
"grad_norm": 0.5044428110122681,
"learning_rate": 2.1152495889970035e-09,
"loss": 0.267,
"step": 1547
},
{
"epoch": 2.97408,
"grad_norm": 0.5168056488037109,
"learning_rate": 1.8023616455731253e-09,
"loss": 0.2817,
"step": 1548
},
{
"epoch": 2.976,
"grad_norm": 0.503968358039856,
"learning_rate": 1.514498972249312e-09,
"loss": 0.3095,
"step": 1549
},
{
"epoch": 2.97792,
"grad_norm": 0.5102472901344299,
"learning_rate": 1.2516630103137638e-09,
"loss": 0.3095,
"step": 1550
},
{
"epoch": 2.9798400000000003,
"grad_norm": 0.5061061978340149,
"learning_rate": 1.0138550757493592e-09,
"loss": 0.3246,
"step": 1551
},
{
"epoch": 2.98176,
"grad_norm": 0.4807405471801758,
"learning_rate": 8.010763592264381e-10,
"loss": 0.26,
"step": 1552
},
{
"epoch": 2.98368,
"grad_norm": 0.5292450189590454,
"learning_rate": 6.133279260983616e-10,
"loss": 0.3251,
"step": 1553
},
{
"epoch": 2.9856,
"grad_norm": 0.47705909609794617,
"learning_rate": 4.506107163948503e-10,
"loss": 0.2451,
"step": 1554
},
{
"epoch": 2.98752,
"grad_norm": 0.512753427028656,
"learning_rate": 3.12925544818099e-10,
"loss": 0.3267,
"step": 1555
},
{
"epoch": 2.98944,
"grad_norm": 0.48979803919792175,
"learning_rate": 2.0027310073833516e-10,
"loss": 0.2593,
"step": 1556
},
{
"epoch": 2.9913600000000002,
"grad_norm": 0.5256837606430054,
"learning_rate": 1.1265394818993358e-10,
"loss": 0.3219,
"step": 1557
},
{
"epoch": 2.99328,
"grad_norm": 0.5066260099411011,
"learning_rate": 5.0068525870305974e-11,
"loss": 0.2918,
"step": 1558
},
{
"epoch": 2.9952,
"grad_norm": 0.5615788102149963,
"learning_rate": 1.251714713546015e-11,
"loss": 0.3611,
"step": 1559
},
{
"epoch": 2.99712,
"grad_norm": 0.509102463722229,
"learning_rate": 0.0,
"loss": 0.2725,
"step": 1560
},
{
"epoch": 2.99712,
"step": 1560,
"total_flos": 239004782878720.0,
"train_loss": 0.33505433158805736,
"train_runtime": 13521.1545,
"train_samples_per_second": 11.094,
"train_steps_per_second": 0.115
}
],
"logging_steps": 1.0,
"max_steps": 1560,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 239004782878720.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}