{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.956190476190476,
  "eval_steps": 500,
  "global_step": 325,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015238095238095238,
      "grad_norm": 6.41825703019793,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 1.0212,
      "step": 1
    },
    {
      "epoch": 0.030476190476190476,
      "grad_norm": 6.32460329298527,
      "learning_rate": 4.848484848484849e-06,
      "loss": 1.0078,
      "step": 2
    },
    {
      "epoch": 0.045714285714285714,
      "grad_norm": 5.823493836887707,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.9926,
      "step": 3
    },
    {
      "epoch": 0.06095238095238095,
      "grad_norm": 4.215092249626398,
      "learning_rate": 9.696969696969698e-06,
      "loss": 0.9511,
      "step": 4
    },
    {
      "epoch": 0.0761904761904762,
      "grad_norm": 2.755821783551069,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.924,
      "step": 5
    },
    {
      "epoch": 0.09142857142857143,
      "grad_norm": 5.79854862437273,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.9436,
      "step": 6
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 6.970153409745764,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 0.9199,
      "step": 7
    },
    {
      "epoch": 0.1219047619047619,
      "grad_norm": 6.398045150850651,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.9368,
      "step": 8
    },
    {
      "epoch": 0.13714285714285715,
      "grad_norm": 3.7323118823746717,
      "learning_rate": 2.1818181818181818e-05,
      "loss": 0.8624,
      "step": 9
    },
    {
      "epoch": 0.1523809523809524,
      "grad_norm": 2.4649345087328935,
      "learning_rate": 2.4242424242424244e-05,
      "loss": 0.8177,
      "step": 10
    },
    {
      "epoch": 0.1676190476190476,
      "grad_norm": 2.624350847053939,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.7966,
      "step": 11
    },
    {
      "epoch": 0.18285714285714286,
      "grad_norm": 1.495708592085656,
      "learning_rate": 2.9090909090909093e-05,
      "loss": 0.7813,
      "step": 12
    },
    {
      "epoch": 0.1980952380952381,
      "grad_norm": 1.4871227985381805,
      "learning_rate": 3.151515151515152e-05,
      "loss": 0.7453,
      "step": 13
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 2.060287535099846,
      "learning_rate": 3.3939393939393945e-05,
      "loss": 0.7371,
      "step": 14
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 1.4326857447271233,
      "learning_rate": 3.6363636363636364e-05,
      "loss": 0.7234,
      "step": 15
    },
    {
      "epoch": 0.2438095238095238,
      "grad_norm": 2.1165784341208105,
      "learning_rate": 3.878787878787879e-05,
      "loss": 0.7143,
      "step": 16
    },
    {
      "epoch": 0.259047619047619,
      "grad_norm": 1.7376117944761271,
      "learning_rate": 4.1212121212121216e-05,
      "loss": 0.7091,
      "step": 17
    },
    {
      "epoch": 0.2742857142857143,
      "grad_norm": 1.9235494481574669,
      "learning_rate": 4.3636363636363636e-05,
      "loss": 0.7033,
      "step": 18
    },
    {
      "epoch": 0.2895238095238095,
      "grad_norm": 1.2885542410242774,
      "learning_rate": 4.606060606060607e-05,
      "loss": 0.693,
      "step": 19
    },
    {
      "epoch": 0.3047619047619048,
      "grad_norm": 2.0682542365556746,
      "learning_rate": 4.848484848484849e-05,
      "loss": 0.6782,
      "step": 20
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1294865223043438,
      "learning_rate": 5.0909090909090914e-05,
      "loss": 0.6734,
      "step": 21
    },
    {
      "epoch": 0.3352380952380952,
      "grad_norm": 1.6892295840685134,
      "learning_rate": 5.333333333333333e-05,
      "loss": 0.6701,
      "step": 22
    },
    {
      "epoch": 0.3504761904761905,
      "grad_norm": 1.7015145087043497,
      "learning_rate": 5.5757575757575766e-05,
      "loss": 0.6747,
      "step": 23
    },
    {
      "epoch": 0.3657142857142857,
      "grad_norm": 1.105513245543281,
      "learning_rate": 5.8181818181818185e-05,
      "loss": 0.6569,
      "step": 24
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 1.9301412200235886,
      "learning_rate": 6.060606060606061e-05,
      "loss": 0.6672,
      "step": 25
    },
    {
      "epoch": 0.3961904761904762,
      "grad_norm": 1.5372986311163992,
      "learning_rate": 6.303030303030304e-05,
      "loss": 0.6575,
      "step": 26
    },
    {
      "epoch": 0.4114285714285714,
      "grad_norm": 1.841770844497413,
      "learning_rate": 6.545454545454546e-05,
      "loss": 0.6439,
      "step": 27
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 1.285994127925713,
      "learning_rate": 6.787878787878789e-05,
      "loss": 0.6421,
      "step": 28
    },
    {
      "epoch": 0.4419047619047619,
      "grad_norm": 1.306641316576376,
      "learning_rate": 7.03030303030303e-05,
      "loss": 0.6446,
      "step": 29
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 2.060289337588267,
      "learning_rate": 7.272727272727273e-05,
      "loss": 0.6418,
      "step": 30
    },
    {
      "epoch": 0.4723809523809524,
      "grad_norm": 1.5995121846928928,
      "learning_rate": 7.515151515151517e-05,
      "loss": 0.6318,
      "step": 31
    },
    {
      "epoch": 0.4876190476190476,
      "grad_norm": 1.5361156868763157,
      "learning_rate": 7.757575757575758e-05,
      "loss": 0.6356,
      "step": 32
    },
    {
      "epoch": 0.5028571428571429,
      "grad_norm": 1.9790958633956652,
      "learning_rate": 8e-05,
      "loss": 0.6315,
      "step": 33
    },
    {
      "epoch": 0.518095238095238,
      "grad_norm": 1.0983212679170282,
      "learning_rate": 7.999768495280586e-05,
      "loss": 0.6212,
      "step": 34
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 2.356052428059575,
      "learning_rate": 7.999074007919565e-05,
      "loss": 0.6366,
      "step": 35
    },
    {
      "epoch": 0.5485714285714286,
      "grad_norm": 1.9118106657858278,
      "learning_rate": 7.997916618305483e-05,
      "loss": 0.6337,
      "step": 36
    },
    {
      "epoch": 0.5638095238095238,
      "grad_norm": 2.086875421088417,
      "learning_rate": 7.996296460408921e-05,
      "loss": 0.6243,
      "step": 37
    },
    {
      "epoch": 0.579047619047619,
      "grad_norm": 1.6815412319889804,
      "learning_rate": 7.994213721766979e-05,
      "loss": 0.6222,
      "step": 38
    },
    {
      "epoch": 0.5942857142857143,
      "grad_norm": 1.9619509595749545,
      "learning_rate": 7.99166864346157e-05,
      "loss": 0.6185,
      "step": 39
    },
    {
      "epoch": 0.6095238095238096,
      "grad_norm": 1.1850043703613138,
      "learning_rate": 7.988661520091513e-05,
      "loss": 0.6222,
      "step": 40
    },
    {
      "epoch": 0.6247619047619047,
      "grad_norm": 1.5413225249585591,
      "learning_rate": 7.985192699738432e-05,
      "loss": 0.6131,
      "step": 41
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.2994933443512697,
      "learning_rate": 7.981262583926472e-05,
      "loss": 0.6326,
      "step": 42
    },
    {
      "epoch": 0.6552380952380953,
      "grad_norm": 0.8610367129145644,
      "learning_rate": 7.976871627575808e-05,
      "loss": 0.6057,
      "step": 43
    },
    {
      "epoch": 0.6704761904761904,
      "grad_norm": 2.630926210907478,
      "learning_rate": 7.972020338950004e-05,
      "loss": 0.6366,
      "step": 44
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 1.542557855004854,
      "learning_rate": 7.96670927959716e-05,
      "loss": 0.624,
      "step": 45
    },
    {
      "epoch": 0.700952380952381,
      "grad_norm": 1.9643148826366752,
      "learning_rate": 7.960939064284934e-05,
      "loss": 0.619,
      "step": 46
    },
    {
      "epoch": 0.7161904761904762,
      "grad_norm": 1.248523269079334,
      "learning_rate": 7.954710360929362e-05,
      "loss": 0.6108,
      "step": 47
    },
    {
      "epoch": 0.7314285714285714,
      "grad_norm": 1.225316205009982,
      "learning_rate": 7.948023890517557e-05,
      "loss": 0.6126,
      "step": 48
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 1.4200551325493893,
      "learning_rate": 7.940880427024243e-05,
      "loss": 0.6093,
      "step": 49
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 1.3484644010507392,
      "learning_rate": 7.933280797322181e-05,
      "loss": 0.5995,
      "step": 50
    },
    {
      "epoch": 0.7771428571428571,
      "grad_norm": 1.317639647361387,
      "learning_rate": 7.925225881086437e-05,
      "loss": 0.5967,
      "step": 51
    },
    {
      "epoch": 0.7923809523809524,
      "grad_norm": 1.5340582175990225,
      "learning_rate": 7.916716610692578e-05,
      "loss": 0.5981,
      "step": 52
    },
    {
      "epoch": 0.8076190476190476,
      "grad_norm": 1.0386828999827622,
      "learning_rate": 7.907753971108728e-05,
      "loss": 0.5896,
      "step": 53
    },
    {
      "epoch": 0.8228571428571428,
      "grad_norm": 1.5332648656890953,
      "learning_rate": 7.898338999781567e-05,
      "loss": 0.589,
      "step": 54
    },
    {
      "epoch": 0.8380952380952381,
      "grad_norm": 1.1339766871931323,
      "learning_rate": 7.888472786516246e-05,
      "loss": 0.5937,
      "step": 55
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 1.5164034405411189,
      "learning_rate": 7.87815647335023e-05,
      "loss": 0.5883,
      "step": 56
    },
    {
      "epoch": 0.8685714285714285,
      "grad_norm": 0.9700436403678562,
      "learning_rate": 7.86739125442111e-05,
      "loss": 0.5931,
      "step": 57
    },
    {
      "epoch": 0.8838095238095238,
      "grad_norm": 1.3463613799484717,
      "learning_rate": 7.856178375828384e-05,
      "loss": 0.589,
      "step": 58
    },
    {
      "epoch": 0.8990476190476191,
      "grad_norm": 0.8752856535881799,
      "learning_rate": 7.844519135489204e-05,
      "loss": 0.5754,
      "step": 59
    },
    {
      "epoch": 0.9142857142857143,
      "grad_norm": 1.1854440512880053,
      "learning_rate": 7.832414882988153e-05,
      "loss": 0.5894,
      "step": 60
    },
    {
      "epoch": 0.9295238095238095,
      "grad_norm": 1.7051861411911757,
      "learning_rate": 7.819867019421023e-05,
      "loss": 0.5885,
      "step": 61
    },
    {
      "epoch": 0.9447619047619048,
      "grad_norm": 0.6380913241526577,
      "learning_rate": 7.806876997232625e-05,
      "loss": 0.5834,
      "step": 62
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.3953430391233206,
      "learning_rate": 7.793446320048687e-05,
      "loss": 0.5783,
      "step": 63
    },
    {
      "epoch": 0.9752380952380952,
      "grad_norm": 1.0079170376946163,
      "learning_rate": 7.779576542501781e-05,
      "loss": 0.5839,
      "step": 64
    },
    {
      "epoch": 0.9904761904761905,
      "grad_norm": 1.1878161560623453,
      "learning_rate": 7.765269270051389e-05,
      "loss": 0.575,
      "step": 65
    },
    {
      "epoch": 1.0066666666666666,
      "grad_norm": 1.495861192796852,
      "learning_rate": 7.750526158798056e-05,
      "loss": 0.8303,
      "step": 66
    },
    {
      "epoch": 1.0219047619047619,
      "grad_norm": 1.3683956168224771,
      "learning_rate": 7.735348915291701e-05,
      "loss": 0.571,
      "step": 67
    },
    {
      "epoch": 1.0371428571428571,
      "grad_norm": 0.8831683489698775,
      "learning_rate": 7.719739296334073e-05,
      "loss": 0.5548,
      "step": 68
    },
    {
      "epoch": 1.0523809523809524,
      "grad_norm": 0.9884633873935289,
      "learning_rate": 7.7036991087754e-05,
      "loss": 0.5619,
      "step": 69
    },
    {
      "epoch": 1.0676190476190477,
      "grad_norm": 1.658491152165491,
      "learning_rate": 7.687230209305242e-05,
      "loss": 0.5876,
      "step": 70
    },
    {
      "epoch": 1.0828571428571427,
      "grad_norm": 0.8352075307968011,
      "learning_rate": 7.670334504237575e-05,
      "loss": 0.5685,
      "step": 71
    },
    {
      "epoch": 1.098095238095238,
      "grad_norm": 1.429719144977111,
      "learning_rate": 7.65301394929013e-05,
      "loss": 0.5764,
      "step": 72
    },
    {
      "epoch": 1.1133333333333333,
      "grad_norm": 0.9027318597099093,
      "learning_rate": 7.635270549358012e-05,
      "loss": 0.5602,
      "step": 73
    },
    {
      "epoch": 1.1285714285714286,
      "grad_norm": 1.0536650805805048,
      "learning_rate": 7.617106358281634e-05,
      "loss": 0.5765,
      "step": 74
    },
    {
      "epoch": 1.1438095238095238,
      "grad_norm": 0.8336607908560256,
      "learning_rate": 7.598523478608974e-05,
      "loss": 0.5583,
      "step": 75
    },
    {
      "epoch": 1.159047619047619,
      "grad_norm": 0.7166113488943966,
      "learning_rate": 7.579524061352206e-05,
      "loss": 0.551,
      "step": 76
    },
    {
      "epoch": 1.1742857142857144,
      "grad_norm": 0.8105369080449557,
      "learning_rate": 7.560110305738708e-05,
      "loss": 0.5659,
      "step": 77
    },
    {
      "epoch": 1.1895238095238094,
      "grad_norm": 0.6283210931425559,
      "learning_rate": 7.540284458956503e-05,
      "loss": 0.5379,
      "step": 78
    },
    {
      "epoch": 1.2047619047619047,
      "grad_norm": 0.8126105033449552,
      "learning_rate": 7.520048815894144e-05,
      "loss": 0.5476,
      "step": 79
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.5252506521520861,
      "learning_rate": 7.49940571887506e-05,
      "loss": 0.5551,
      "step": 80
    },
    {
      "epoch": 1.2352380952380952,
      "grad_norm": 0.6609919242663752,
      "learning_rate": 7.478357557386444e-05,
      "loss": 0.5407,
      "step": 81
    },
    {
      "epoch": 1.2504761904761905,
      "grad_norm": 0.576238102776681,
      "learning_rate": 7.456906767802658e-05,
      "loss": 0.5465,
      "step": 82
    },
    {
      "epoch": 1.2657142857142858,
      "grad_norm": 0.5233988001346253,
      "learning_rate": 7.435055833103213e-05,
      "loss": 0.55,
      "step": 83
    },
    {
      "epoch": 1.2809523809523808,
      "grad_norm": 0.44023553757218015,
      "learning_rate": 7.412807282585362e-05,
      "loss": 0.5412,
      "step": 84
    },
    {
      "epoch": 1.2961904761904761,
      "grad_norm": 0.4831178702636027,
      "learning_rate": 7.390163691571325e-05,
      "loss": 0.5398,
      "step": 85
    },
    {
      "epoch": 1.3114285714285714,
      "grad_norm": 0.454634628403172,
      "learning_rate": 7.367127681110199e-05,
      "loss": 0.534,
      "step": 86
    },
    {
      "epoch": 1.3266666666666667,
      "grad_norm": 0.44419681534445155,
      "learning_rate": 7.343701917674547e-05,
      "loss": 0.5421,
      "step": 87
    },
    {
      "epoch": 1.341904761904762,
      "grad_norm": 0.4888479476988804,
      "learning_rate": 7.319889112851771e-05,
      "loss": 0.5371,
      "step": 88
    },
    {
      "epoch": 1.3571428571428572,
      "grad_norm": 0.7143397739870562,
      "learning_rate": 7.295692023030217e-05,
      "loss": 0.5377,
      "step": 89
    },
    {
      "epoch": 1.3723809523809525,
      "grad_norm": 0.9465106459896011,
      "learning_rate": 7.271113449080131e-05,
      "loss": 0.5479,
      "step": 90
    },
    {
      "epoch": 1.3876190476190478,
      "grad_norm": 1.2678262246780991,
      "learning_rate": 7.246156236029445e-05,
      "loss": 0.5452,
      "step": 91
    },
    {
      "epoch": 1.4028571428571428,
      "grad_norm": 0.6281248994163343,
      "learning_rate": 7.220823272734461e-05,
      "loss": 0.5401,
      "step": 92
    },
    {
      "epoch": 1.418095238095238,
      "grad_norm": 0.34247752257094766,
      "learning_rate": 7.19511749154546e-05,
      "loss": 0.5394,
      "step": 93
    },
    {
      "epoch": 1.4333333333333333,
      "grad_norm": 0.7014077226631056,
      "learning_rate": 7.169041867967272e-05,
      "loss": 0.5407,
      "step": 94
    },
    {
      "epoch": 1.4485714285714286,
      "grad_norm": 0.9221215435654039,
      "learning_rate": 7.142599420314857e-05,
      "loss": 0.5464,
      "step": 95
    },
    {
      "epoch": 1.4638095238095237,
      "grad_norm": 0.8724875653280793,
      "learning_rate": 7.115793209363929e-05,
      "loss": 0.5379,
      "step": 96
    },
    {
      "epoch": 1.479047619047619,
      "grad_norm": 0.734302784896464,
      "learning_rate": 7.088626337996657e-05,
      "loss": 0.5248,
      "step": 97
    },
    {
      "epoch": 1.4942857142857142,
      "grad_norm": 0.6804548302488894,
      "learning_rate": 7.06110195084251e-05,
      "loss": 0.5403,
      "step": 98
    },
    {
      "epoch": 1.5095238095238095,
      "grad_norm": 0.8180102079360684,
      "learning_rate": 7.03322323391425e-05,
      "loss": 0.5356,
      "step": 99
    },
    {
      "epoch": 1.5247619047619048,
      "grad_norm": 0.7089574204038593,
      "learning_rate": 7.004993414239147e-05,
      "loss": 0.5367,
      "step": 100
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.4534978500691297,
      "learning_rate": 6.976415759485443e-05,
      "loss": 0.5301,
      "step": 101
    },
    {
      "epoch": 1.5552380952380953,
      "grad_norm": 0.5791842384826449,
      "learning_rate": 6.947493577584111e-05,
      "loss": 0.5338,
      "step": 102
    },
    {
      "epoch": 1.5704761904761906,
      "grad_norm": 0.6373667696236239,
      "learning_rate": 6.918230216345951e-05,
      "loss": 0.5255,
      "step": 103
    },
    {
      "epoch": 1.5857142857142859,
      "grad_norm": 0.5813750117468534,
      "learning_rate": 6.888629063074082e-05,
      "loss": 0.5209,
      "step": 104
    },
    {
      "epoch": 1.600952380952381,
      "grad_norm": 0.5731639034091425,
      "learning_rate": 6.858693544171845e-05,
      "loss": 0.5215,
      "step": 105
    },
    {
      "epoch": 1.6161904761904762,
      "grad_norm": 0.5351450945596543,
      "learning_rate": 6.828427124746191e-05,
      "loss": 0.5301,
      "step": 106
    },
    {
      "epoch": 1.6314285714285715,
      "grad_norm": 0.6016816279659157,
      "learning_rate": 6.797833308206588e-05,
      "loss": 0.529,
      "step": 107
    },
    {
      "epoch": 1.6466666666666665,
      "grad_norm": 0.7659521464895882,
      "learning_rate": 6.766915635859497e-05,
      "loss": 0.533,
      "step": 108
    },
    {
      "epoch": 1.6619047619047618,
      "grad_norm": 0.9630524913355004,
      "learning_rate": 6.735677686498443e-05,
      "loss": 0.5297,
      "step": 109
    },
    {
      "epoch": 1.677142857142857,
      "grad_norm": 1.1492200252998914,
      "learning_rate": 6.70412307598978e-05,
      "loss": 0.5264,
      "step": 110
    },
    {
      "epoch": 1.6923809523809523,
      "grad_norm": 0.5857314920473611,
      "learning_rate": 6.672255456854135e-05,
      "loss": 0.5241,
      "step": 111
    },
    {
      "epoch": 1.7076190476190476,
      "grad_norm": 0.48173626706101424,
      "learning_rate": 6.640078517843619e-05,
      "loss": 0.5346,
      "step": 112
    },
    {
      "epoch": 1.7228571428571429,
      "grad_norm": 0.7730414410041352,
      "learning_rate": 6.60759598351485e-05,
      "loss": 0.5229,
      "step": 113
    },
    {
      "epoch": 1.7380952380952381,
      "grad_norm": 0.6363250099986629,
      "learning_rate": 6.57481161379783e-05,
      "loss": 0.5243,
      "step": 114
    },
    {
      "epoch": 1.7533333333333334,
      "grad_norm": 0.45624023704141803,
      "learning_rate": 6.54172920356071e-05,
      "loss": 0.5247,
      "step": 115
    },
    {
      "epoch": 1.7685714285714287,
      "grad_norm": 0.4715396085670591,
      "learning_rate": 6.508352582170543e-05,
      "loss": 0.5144,
      "step": 116
    },
    {
      "epoch": 1.783809523809524,
      "grad_norm": 0.5945001964397519,
      "learning_rate": 6.474685613050014e-05,
      "loss": 0.5279,
      "step": 117
    },
    {
      "epoch": 1.799047619047619,
      "grad_norm": 0.5588842197173406,
      "learning_rate": 6.440732193230241e-05,
      "loss": 0.522,
      "step": 118
    },
    {
      "epoch": 1.8142857142857143,
      "grad_norm": 0.36397509002448036,
      "learning_rate": 6.40649625289969e-05,
      "loss": 0.5206,
      "step": 119
    },
    {
      "epoch": 1.8295238095238096,
      "grad_norm": 0.5225246500798869,
      "learning_rate": 6.371981754949241e-05,
      "loss": 0.5233,
      "step": 120
    },
    {
      "epoch": 1.8447619047619046,
      "grad_norm": 0.6419719236551851,
      "learning_rate": 6.337192694513474e-05,
      "loss": 0.5265,
      "step": 121
    },
    {
      "epoch": 1.8599999999999999,
      "grad_norm": 0.41672473350375433,
      "learning_rate": 6.30213309850823e-05,
      "loss": 0.5165,
      "step": 122
    },
    {
      "epoch": 1.8752380952380951,
      "grad_norm": 0.29619919377664744,
      "learning_rate": 6.266807025164472e-05,
      "loss": 0.5158,
      "step": 123
    },
    {
      "epoch": 1.8904761904761904,
      "grad_norm": 0.41731867813128104,
      "learning_rate": 6.231218563558551e-05,
      "loss": 0.518,
      "step": 124
    },
    {
      "epoch": 1.9057142857142857,
      "grad_norm": 0.3648760435140768,
      "learning_rate": 6.195371833138878e-05,
      "loss": 0.5165,
      "step": 125
    },
    {
      "epoch": 1.920952380952381,
      "grad_norm": 0.3531467699123105,
      "learning_rate": 6.159270983249084e-05,
      "loss": 0.5247,
      "step": 126
    },
    {
      "epoch": 1.9361904761904762,
      "grad_norm": 0.45588003744063627,
      "learning_rate": 6.122920192647734e-05,
      "loss": 0.5197,
      "step": 127
    },
    {
      "epoch": 1.9514285714285715,
      "grad_norm": 0.5510619573466758,
      "learning_rate": 6.086323669024616e-05,
      "loss": 0.5256,
      "step": 128
    },
    {
      "epoch": 1.9666666666666668,
      "grad_norm": 0.5646853258922581,
      "learning_rate": 6.049485648513696e-05,
      "loss": 0.527,
      "step": 129
    },
    {
      "epoch": 1.981904761904762,
      "grad_norm": 0.4479646401937566,
      "learning_rate": 6.012410395202774e-05,
      "loss": 0.5234,
      "step": 130
    },
    {
      "epoch": 1.997142857142857,
      "grad_norm": 0.575986791359234,
      "learning_rate": 5.9751022006399095e-05,
      "loss": 0.7009,
      "step": 131
    },
    {
      "epoch": 2.013333333333333,
      "grad_norm": 0.814219365805337,
      "learning_rate": 5.9375653833366585e-05,
      "loss": 0.5404,
      "step": 132
    },
    {
      "epoch": 2.0285714285714285,
      "grad_norm": 0.8146199046124171,
      "learning_rate": 5.8998042882682024e-05,
      "loss": 0.4877,
      "step": 133
    },
    {
      "epoch": 2.0438095238095237,
      "grad_norm": 0.7629490780519704,
      "learning_rate": 5.861823286370396e-05,
      "loss": 0.4849,
      "step": 134
    },
    {
      "epoch": 2.059047619047619,
      "grad_norm": 0.6570978207497692,
      "learning_rate": 5.823626774033836e-05,
      "loss": 0.4887,
      "step": 135
    },
    {
      "epoch": 2.0742857142857143,
      "grad_norm": 0.5305731216336181,
      "learning_rate": 5.7852191725949564e-05,
      "loss": 0.4881,
      "step": 136
    },
    {
      "epoch": 2.0895238095238096,
      "grad_norm": 0.512493977688762,
      "learning_rate": 5.746604927824257e-05,
      "loss": 0.4844,
      "step": 137
    },
    {
      "epoch": 2.104761904761905,
      "grad_norm": 0.4828622337949408,
      "learning_rate": 5.707788509411685e-05,
      "loss": 0.4887,
      "step": 138
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.4884027438000281,
      "learning_rate": 5.668774410449268e-05,
      "loss": 0.4908,
      "step": 139
    },
    {
      "epoch": 2.1352380952380954,
      "grad_norm": 0.42776751878992564,
      "learning_rate": 5.629567146911023e-05,
      "loss": 0.4872,
      "step": 140
    },
    {
      "epoch": 2.1504761904761907,
      "grad_norm": 0.36115257273207996,
      "learning_rate": 5.590171257130225e-05,
      "loss": 0.4861,
      "step": 141
    },
    {
      "epoch": 2.1657142857142855,
      "grad_norm": 0.38436733504559367,
      "learning_rate": 5.550591301274073e-05,
      "loss": 0.4844,
      "step": 142
    },
    {
      "epoch": 2.1809523809523808,
      "grad_norm": 0.3435583779900254,
      "learning_rate": 5.5108318608158595e-05,
      "loss": 0.4836,
      "step": 143
    },
    {
      "epoch": 2.196190476190476,
      "grad_norm": 0.2857883944660014,
      "learning_rate": 5.4708975380046356e-05,
      "loss": 0.4796,
      "step": 144
    },
    {
      "epoch": 2.2114285714285713,
      "grad_norm": 0.31512140613431383,
      "learning_rate": 5.430792955332503e-05,
      "loss": 0.4863,
      "step": 145
    },
    {
      "epoch": 2.2266666666666666,
      "grad_norm": 0.31769228977835245,
      "learning_rate": 5.3905227549995366e-05,
      "loss": 0.4809,
      "step": 146
    },
    {
      "epoch": 2.241904761904762,
      "grad_norm": 0.27924100773876076,
      "learning_rate": 5.350091598376454e-05,
      "loss": 0.4805,
      "step": 147
    },
    {
      "epoch": 2.257142857142857,
      "grad_norm": 0.2662485345450648,
      "learning_rate": 5.3095041654650384e-05,
      "loss": 0.4831,
      "step": 148
    },
    {
      "epoch": 2.2723809523809524,
      "grad_norm": 0.23461248952656025,
      "learning_rate": 5.268765154356425e-05,
      "loss": 0.4816,
      "step": 149
    },
    {
      "epoch": 2.2876190476190477,
      "grad_norm": 0.240243576104443,
      "learning_rate": 5.227879280687281e-05,
      "loss": 0.4788,
      "step": 150
    },
    {
      "epoch": 2.302857142857143,
      "grad_norm": 0.24392104367270967,
      "learning_rate": 5.186851277093961e-05,
      "loss": 0.4851,
      "step": 151
    },
    {
      "epoch": 2.318095238095238,
      "grad_norm": 0.19828908319240954,
      "learning_rate": 5.1456858926646974e-05,
      "loss": 0.4817,
      "step": 152
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.2354905746190516,
      "learning_rate": 5.104387892389877e-05,
      "loss": 0.4816,
      "step": 153
    },
    {
      "epoch": 2.3485714285714288,
      "grad_norm": 0.24408900516772156,
      "learning_rate": 5.062962056610478e-05,
      "loss": 0.4833,
      "step": 154
    },
    {
      "epoch": 2.3638095238095236,
      "grad_norm": 0.22588988217182931,
      "learning_rate": 5.021413180464748e-05,
      "loss": 0.4884,
      "step": 155
    },
    {
      "epoch": 2.379047619047619,
      "grad_norm": 0.20985654308992252,
      "learning_rate": 4.979746073333145e-05,
      "loss": 0.4777,
      "step": 156
    },
    {
      "epoch": 2.394285714285714,
      "grad_norm": 0.24110384355800776,
      "learning_rate": 4.9379655582816396e-05,
      "loss": 0.4867,
      "step": 157
    },
    {
      "epoch": 2.4095238095238094,
      "grad_norm": 0.20401637758198887,
      "learning_rate": 4.896076471503439e-05,
      "loss": 0.4766,
      "step": 158
    },
    {
      "epoch": 2.4247619047619047,
      "grad_norm": 0.19932449853927872,
      "learning_rate": 4.854083661759185e-05,
      "loss": 0.4884,
      "step": 159
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.24533376047455172,
      "learning_rate": 4.8119919898156934e-05,
      "loss": 0.4834,
      "step": 160
    },
    {
      "epoch": 2.455238095238095,
      "grad_norm": 0.19336383545060395,
      "learning_rate": 4.76980632788332e-05,
      "loss": 0.4779,
      "step": 161
    },
    {
      "epoch": 2.4704761904761905,
      "grad_norm": 0.19824370981981676,
      "learning_rate": 4.7275315590519765e-05,
      "loss": 0.4823,
      "step": 162
    },
    {
      "epoch": 2.4857142857142858,
      "grad_norm": 0.20431330071012052,
      "learning_rate": 4.6851725767259106e-05,
      "loss": 0.4781,
      "step": 163
    },
    {
      "epoch": 2.500952380952381,
      "grad_norm": 0.2082524151757895,
      "learning_rate": 4.642734284057283e-05,
      "loss": 0.4755,
      "step": 164
    },
    {
      "epoch": 2.5161904761904763,
      "grad_norm": 0.2129960852067495,
      "learning_rate": 4.600221593378611e-05,
      "loss": 0.4796,
      "step": 165
    },
    {
      "epoch": 2.5314285714285716,
      "grad_norm": 0.17147652502249622,
      "learning_rate": 4.5576394256341596e-05,
      "loss": 0.4784,
      "step": 166
    },
    {
      "epoch": 2.546666666666667,
      "grad_norm": 0.2328806782705143,
      "learning_rate": 4.514992709810324e-05,
      "loss": 0.4839,
      "step": 167
    },
    {
      "epoch": 2.5619047619047617,
      "grad_norm": 0.2628457128916824,
      "learning_rate": 4.472286382365096e-05,
      "loss": 0.4899,
      "step": 168
    },
    {
      "epoch": 2.5771428571428574,
      "grad_norm": 0.20530497111629914,
      "learning_rate": 4.429525386656653e-05,
      "loss": 0.4865,
      "step": 169
    },
    {
      "epoch": 2.5923809523809522,
      "grad_norm": 0.18615860925149122,
      "learning_rate": 4.38671467237115e-05,
      "loss": 0.4841,
      "step": 170
    },
    {
      "epoch": 2.6076190476190475,
      "grad_norm": 0.181524499480342,
      "learning_rate": 4.343859194949787e-05,
      "loss": 0.4792,
      "step": 171
    },
    {
      "epoch": 2.6228571428571428,
      "grad_norm": 0.2025029996461714,
      "learning_rate": 4.3009639150152e-05,
      "loss": 0.4788,
      "step": 172
    },
    {
      "epoch": 2.638095238095238,
      "grad_norm": 0.19560730560439193,
      "learning_rate": 4.2580337977972664e-05,
      "loss": 0.4827,
      "step": 173
    },
    {
      "epoch": 2.6533333333333333,
      "grad_norm": 0.16741577940585486,
      "learning_rate": 4.215073812558352e-05,
      "loss": 0.4885,
      "step": 174
    },
    {
      "epoch": 2.6685714285714286,
      "grad_norm": 0.1739859347419123,
      "learning_rate": 4.1720889320181224e-05,
      "loss": 0.4772,
      "step": 175
    },
    {
      "epoch": 2.683809523809524,
      "grad_norm": 0.19324214245630159,
      "learning_rate": 4.1290841317779325e-05,
      "loss": 0.4884,
      "step": 176
    },
    {
      "epoch": 2.699047619047619,
      "grad_norm": 0.162133413448313,
      "learning_rate": 4.0860643897448894e-05,
      "loss": 0.4789,
      "step": 177
    },
    {
      "epoch": 2.7142857142857144,
      "grad_norm": 0.1910560430197172,
      "learning_rate": 4.043034685555647e-05,
      "loss": 0.4807,
      "step": 178
    },
    {
      "epoch": 2.7295238095238097,
      "grad_norm": 0.14823981314032625,
      "learning_rate": 4e-05,
      "loss": 0.481,
      "step": 179
    },
    {
      "epoch": 2.744761904761905,
      "grad_norm": 0.1784212235327859,
      "learning_rate": 3.9569653144443546e-05,
      "loss": 0.481,
      "step": 180
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.200733513756011,
      "learning_rate": 3.913935610255112e-05,
      "loss": 0.4763,
      "step": 181
    },
    {
      "epoch": 2.7752380952380955,
      "grad_norm": 0.14938330915090647,
      "learning_rate": 3.870915868222068e-05,
      "loss": 0.4819,
      "step": 182
    },
    {
      "epoch": 2.7904761904761903,
      "grad_norm": 0.1739261226284681,
      "learning_rate": 3.8279110679818775e-05,
      "loss": 0.4738,
      "step": 183
    },
    {
      "epoch": 2.8057142857142856,
      "grad_norm": 0.19838890478019414,
      "learning_rate": 3.78492618744165e-05,
      "loss": 0.4783,
      "step": 184
    },
    {
      "epoch": 2.820952380952381,
      "grad_norm": 0.1528772994448381,
      "learning_rate": 3.7419662022027356e-05,
      "loss": 0.4769,
      "step": 185
    },
    {
      "epoch": 2.836190476190476,
      "grad_norm": 0.1919042097767604,
      "learning_rate": 3.6990360849848005e-05,
      "loss": 0.4757,
      "step": 186
    },
    {
      "epoch": 2.8514285714285714,
      "grad_norm": 0.17566953990872838,
      "learning_rate": 3.656140805050215e-05,
      "loss": 0.4786,
      "step": 187
    },
    {
      "epoch": 2.8666666666666667,
      "grad_norm": 0.1637970789000741,
      "learning_rate": 3.6132853276288506e-05,
      "loss": 0.4739,
      "step": 188
    },
    {
      "epoch": 2.881904761904762,
      "grad_norm": 0.18549958217752188,
      "learning_rate": 3.570474613343348e-05,
      "loss": 0.4768,
      "step": 189
    },
    {
      "epoch": 2.8971428571428572,
      "grad_norm": 0.18002674770058677,
      "learning_rate": 3.527713617634904e-05,
      "loss": 0.4733,
      "step": 190
    },
    {
      "epoch": 2.9123809523809525,
      "grad_norm": 0.15898767756027013,
      "learning_rate": 3.4850072901896766e-05,
      "loss": 0.4802,
      "step": 191
    },
    {
      "epoch": 2.9276190476190473,
      "grad_norm": 0.17974934358276728,
      "learning_rate": 3.442360574365843e-05,
      "loss": 0.4834,
      "step": 192
    },
    {
      "epoch": 2.942857142857143,
      "grad_norm": 0.14203142815133346,
      "learning_rate": 3.39977840662139e-05,
      "loss": 0.4747,
      "step": 193
    },
    {
      "epoch": 2.958095238095238,
      "grad_norm": 0.17952115377014885,
      "learning_rate": 3.3572657159427186e-05,
      "loss": 0.4808,
      "step": 194
    },
    {
      "epoch": 2.9733333333333336,
      "grad_norm": 0.17353334331590287,
      "learning_rate": 3.31482742327409e-05,
      "loss": 0.4751,
      "step": 195
    },
    {
      "epoch": 2.9885714285714284,
      "grad_norm": 0.1528037773950146,
      "learning_rate": 3.272468440948025e-05,
      "loss": 0.4734,
      "step": 196
    },
    {
      "epoch": 3.0047619047619047,
      "grad_norm": 0.20480536463226645,
      "learning_rate": 3.230193672116681e-05,
      "loss": 0.6928,
      "step": 197
    },
    {
      "epoch": 3.02,
      "grad_norm": 0.19151123516717333,
      "learning_rate": 3.188008010184306e-05,
      "loss": 0.4452,
      "step": 198
    },
    {
      "epoch": 3.0352380952380953,
      "grad_norm": 0.2104984649163227,
      "learning_rate": 3.145916338240816e-05,
      "loss": 0.4546,
      "step": 199
    },
    {
      "epoch": 3.0504761904761906,
      "grad_norm": 0.19741714449829997,
      "learning_rate": 3.103923528496562e-05,
      "loss": 0.4452,
      "step": 200
    },
    {
      "epoch": 3.065714285714286,
      "grad_norm": 0.19623857285807467,
      "learning_rate": 3.062034441718362e-05,
      "loss": 0.4483,
      "step": 201
    },
    {
      "epoch": 3.080952380952381,
      "grad_norm": 0.25035024771569697,
      "learning_rate": 3.0202539266668568e-05,
      "loss": 0.4506,
      "step": 202
    },
    {
      "epoch": 3.0961904761904764,
      "grad_norm": 0.16968910324281286,
      "learning_rate": 2.9785868195352525e-05,
      "loss": 0.4449,
      "step": 203
    },
    {
      "epoch": 3.111428571428571,
      "grad_norm": 0.23581488026041375,
      "learning_rate": 2.9370379433895228e-05,
      "loss": 0.4512,
      "step": 204
    },
    {
      "epoch": 3.1266666666666665,
      "grad_norm": 0.15672745746884265,
      "learning_rate": 2.8956121076101244e-05,
      "loss": 0.449,
      "step": 205
    },
    {
      "epoch": 3.1419047619047618,
      "grad_norm": 0.20702186752340365,
      "learning_rate": 2.8543141073353026e-05,
      "loss": 0.4439,
      "step": 206
    },
    {
      "epoch": 3.157142857142857,
      "grad_norm": 0.16133838526688454,
      "learning_rate": 2.8131487229060387e-05,
      "loss": 0.4427,
      "step": 207
    },
    {
      "epoch": 3.1723809523809523,
      "grad_norm": 0.1854062242780086,
      "learning_rate": 2.7721207193127212e-05,
      "loss": 0.4472,
      "step": 208
    },
    {
      "epoch": 3.1876190476190476,
      "grad_norm": 0.17182738319155882,
      "learning_rate": 2.7312348456435766e-05,
      "loss": 0.4453,
      "step": 209
    },
    {
      "epoch": 3.202857142857143,
      "grad_norm": 0.16697225397908244,
      "learning_rate": 2.6904958345349633e-05,
      "loss": 0.4561,
      "step": 210
    },
    {
      "epoch": 3.218095238095238,
      "grad_norm": 0.16821522604659328,
      "learning_rate": 2.6499084016235475e-05,
      "loss": 0.444,
      "step": 211
    },
    {
      "epoch": 3.2333333333333334,
      "grad_norm": 0.14853827941880265,
      "learning_rate": 2.609477245000464e-05,
      "loss": 0.44,
      "step": 212
    },
    {
      "epoch": 3.2485714285714287,
      "grad_norm": 0.1502037122279702,
      "learning_rate": 2.569207044667498e-05,
      "loss": 0.4494,
      "step": 213
    },
    {
      "epoch": 3.263809523809524,
      "grad_norm": 0.16216155417715333,
      "learning_rate": 2.529102461995364e-05,
      "loss": 0.4471,
      "step": 214
    },
    {
      "epoch": 3.279047619047619,
      "grad_norm": 0.1586744564208733,
      "learning_rate": 2.48916813918414e-05,
      "loss": 0.4486,
      "step": 215
    },
    {
      "epoch": 3.2942857142857145,
      "grad_norm": 0.14092622353201845,
      "learning_rate": 2.449408698725928e-05,
      "loss": 0.445,
      "step": 216
    },
    {
      "epoch": 3.3095238095238093,
      "grad_norm": 0.15562276697944483,
      "learning_rate": 2.409828742869777e-05,
      "loss": 0.4415,
      "step": 217
    },
    {
      "epoch": 3.3247619047619046,
      "grad_norm": 0.14631955274946826,
      "learning_rate": 2.370432853088978e-05,
      "loss": 0.454,
      "step": 218
    },
    {
      "epoch": 3.34,
      "grad_norm": 0.14585582091880941,
      "learning_rate": 2.3312255895507336e-05,
      "loss": 0.4473,
      "step": 219
    },
    {
      "epoch": 3.355238095238095,
      "grad_norm": 0.1458201114996886,
      "learning_rate": 2.2922114905883167e-05,
      "loss": 0.4444,
      "step": 220
    },
    {
      "epoch": 3.3704761904761904,
      "grad_norm": 0.1402608135797642,
      "learning_rate": 2.2533950721757447e-05,
      "loss": 0.4465,
      "step": 221
    },
    {
      "epoch": 3.3857142857142857,
      "grad_norm": 0.14672569191797688,
      "learning_rate": 2.2147808274050436e-05,
      "loss": 0.4484,
      "step": 222
    },
    {
      "epoch": 3.400952380952381,
      "grad_norm": 0.15377591576947733,
      "learning_rate": 2.1763732259661657e-05,
      "loss": 0.4479,
      "step": 223
    },
    {
      "epoch": 3.416190476190476,
      "grad_norm": 0.14739415052119614,
      "learning_rate": 2.138176713629605e-05,
      "loss": 0.4475,
      "step": 224
    },
    {
      "epoch": 3.4314285714285715,
      "grad_norm": 0.14607883746358746,
      "learning_rate": 2.1001957117318002e-05,
      "loss": 0.4421,
      "step": 225
    },
    {
      "epoch": 3.4466666666666668,
      "grad_norm": 0.1465432249850391,
      "learning_rate": 2.0624346166633425e-05,
      "loss": 0.4396,
      "step": 226
    },
    {
      "epoch": 3.461904761904762,
      "grad_norm": 0.12349201931738121,
      "learning_rate": 2.0248977993600912e-05,
      "loss": 0.4491,
      "step": 227
    },
    {
      "epoch": 3.4771428571428573,
      "grad_norm": 0.14981067132432477,
      "learning_rate": 1.987589604797227e-05,
      "loss": 0.4448,
      "step": 228
    },
    {
      "epoch": 3.4923809523809526,
      "grad_norm": 0.1137656845152472,
      "learning_rate": 1.9505143514863056e-05,
      "loss": 0.4489,
      "step": 229
    },
    {
      "epoch": 3.5076190476190474,
      "grad_norm": 0.1398849247762614,
      "learning_rate": 1.913676330975385e-05,
      "loss": 0.4546,
      "step": 230
    },
    {
      "epoch": 3.522857142857143,
      "grad_norm": 0.12820525613752842,
      "learning_rate": 1.8770798073522657e-05,
      "loss": 0.447,
      "step": 231
    },
    {
      "epoch": 3.538095238095238,
      "grad_norm": 0.13773852598915348,
      "learning_rate": 1.8407290167509163e-05,
      "loss": 0.4543,
      "step": 232
    },
    {
      "epoch": 3.5533333333333332,
      "grad_norm": 0.12262425169332791,
      "learning_rate": 1.8046281668611242e-05,
      "loss": 0.444,
      "step": 233
    },
    {
      "epoch": 3.5685714285714285,
      "grad_norm": 0.11207677036541486,
      "learning_rate": 1.7687814364414508e-05,
      "loss": 0.4451,
      "step": 234
    },
    {
      "epoch": 3.583809523809524,
      "grad_norm": 0.11456531045490899,
      "learning_rate": 1.73319297483553e-05,
      "loss": 0.4465,
      "step": 235
    },
    {
      "epoch": 3.599047619047619,
      "grad_norm": 0.12011483524657235,
      "learning_rate": 1.6978669014917723e-05,
      "loss": 0.4406,
      "step": 236
    },
    {
      "epoch": 3.6142857142857143,
      "grad_norm": 0.11200693227331737,
      "learning_rate": 1.6628073054865262e-05,
      "loss": 0.4539,
      "step": 237
    },
    {
      "epoch": 3.6295238095238096,
      "grad_norm": 0.11945653235133909,
      "learning_rate": 1.6280182450507593e-05,
      "loss": 0.4505,
      "step": 238
    },
    {
      "epoch": 3.644761904761905,
      "grad_norm": 0.11328522908106976,
      "learning_rate": 1.593503747100309e-05,
      "loss": 0.44,
      "step": 239
    },
    {
      "epoch": 3.66,
      "grad_norm": 0.11584353620347927,
      "learning_rate": 1.5592678067697594e-05,
      "loss": 0.4401,
      "step": 240
    },
    {
      "epoch": 3.675238095238095,
      "grad_norm": 0.11338268232248719,
      "learning_rate": 1.5253143869499875e-05,
      "loss": 0.4526,
      "step": 241
    },
    {
      "epoch": 3.6904761904761907,
      "grad_norm": 0.11825521687064344,
      "learning_rate": 1.4916474178294578e-05,
      "loss": 0.4472,
      "step": 242
    },
    {
      "epoch": 3.7057142857142855,
      "grad_norm": 0.10393717835800384,
      "learning_rate": 1.4582707964392909e-05,
      "loss": 0.4503,
      "step": 243
    },
    {
      "epoch": 3.720952380952381,
      "grad_norm": 0.11344323679399201,
      "learning_rate": 1.4251883862021715e-05,
      "loss": 0.4507,
      "step": 244
    },
    {
      "epoch": 3.736190476190476,
      "grad_norm": 0.11795276330820621,
      "learning_rate": 1.3924040164851497e-05,
      "loss": 0.4509,
      "step": 245
    },
    {
      "epoch": 3.7514285714285713,
      "grad_norm": 0.10446068928724489,
      "learning_rate": 1.359921482156382e-05,
      "loss": 0.453,
      "step": 246
    },
    {
      "epoch": 3.7666666666666666,
      "grad_norm": 0.11963166659223025,
      "learning_rate": 1.3277445431458653e-05,
      "loss": 0.4423,
      "step": 247
    },
    {
      "epoch": 3.781904761904762,
      "grad_norm": 0.11133235152901447,
      "learning_rate": 1.2958769240102206e-05,
      "loss": 0.4465,
      "step": 248
    },
    {
      "epoch": 3.797142857142857,
      "grad_norm": 0.10488189725095455,
      "learning_rate": 1.264322313501559e-05,
      "loss": 0.4495,
      "step": 249
    },
    {
      "epoch": 3.8123809523809524,
      "grad_norm": 0.09788048647298295,
      "learning_rate": 1.2330843641405057e-05,
      "loss": 0.4451,
      "step": 250
    },
    {
      "epoch": 3.8276190476190477,
      "grad_norm": 0.1068932171231341,
      "learning_rate": 1.2021666917934125e-05,
      "loss": 0.4473,
      "step": 251
    },
    {
      "epoch": 3.842857142857143,
      "grad_norm": 0.09965606097681808,
      "learning_rate": 1.1715728752538103e-05,
      "loss": 0.4443,
      "step": 252
    },
    {
      "epoch": 3.8580952380952382,
      "grad_norm": 0.10053631876045145,
      "learning_rate": 1.1413064558281564e-05,
      "loss": 0.4468,
      "step": 253
    },
    {
      "epoch": 3.873333333333333,
      "grad_norm": 0.09801079877642455,
      "learning_rate": 1.1113709369259187e-05,
      "loss": 0.4426,
      "step": 254
    },
    {
      "epoch": 3.888571428571429,
      "grad_norm": 0.09483370655189176,
      "learning_rate": 1.0817697836540493e-05,
      "loss": 0.4493,
      "step": 255
    },
    {
      "epoch": 3.9038095238095236,
      "grad_norm": 0.09358289511323944,
      "learning_rate": 1.052506422415891e-05,
      "loss": 0.4415,
      "step": 256
    },
    {
      "epoch": 3.919047619047619,
      "grad_norm": 0.09937735008586661,
      "learning_rate": 1.0235842405145587e-05,
      "loss": 0.447,
      "step": 257
    },
    {
      "epoch": 3.934285714285714,
      "grad_norm": 0.09713725112426527,
      "learning_rate": 9.950065857608537e-06,
      "loss": 0.4449,
      "step": 258
    },
    {
      "epoch": 3.9495238095238094,
      "grad_norm": 0.10061824714900855,
      "learning_rate": 9.667767660857508e-06,
      "loss": 0.4378,
      "step": 259
    },
    {
      "epoch": 3.9647619047619047,
      "grad_norm": 0.09975526240303212,
      "learning_rate": 9.388980491574901e-06,
      "loss": 0.4485,
      "step": 260
    },
    {
      "epoch": 3.98,
      "grad_norm": 0.09583315993538538,
      "learning_rate": 9.11373662003343e-06,
      "loss": 0.4518,
      "step": 261
    },
    {
      "epoch": 3.9952380952380953,
      "grad_norm": 0.12153372041343533,
      "learning_rate": 8.842067906360716e-06,
      "loss": 0.559,
      "step": 262
    },
    {
      "epoch": 4.011428571428572,
      "grad_norm": 0.14086095004056465,
      "learning_rate": 8.574005796851419e-06,
      "loss": 0.5283,
      "step": 263
    },
    {
      "epoch": 4.026666666666666,
      "grad_norm": 0.11788004209905123,
      "learning_rate": 8.309581320327282e-06,
      "loss": 0.4248,
      "step": 264
    },
    {
      "epoch": 4.041904761904762,
      "grad_norm": 0.11207857563036874,
      "learning_rate": 8.048825084545413e-06,
      "loss": 0.4208,
      "step": 265
    },
    {
      "epoch": 4.057142857142857,
      "grad_norm": 0.11130142780853505,
      "learning_rate": 7.791767272655403e-06,
      "loss": 0.4282,
      "step": 266
    },
    {
      "epoch": 4.072380952380953,
      "grad_norm": 0.11636579346899405,
      "learning_rate": 7.5384376397055644e-06,
      "loss": 0.4275,
      "step": 267
    },
    {
      "epoch": 4.0876190476190475,
      "grad_norm": 0.12911191155727517,
      "learning_rate": 7.2888655091986995e-06,
      "loss": 0.4296,
      "step": 268
    },
    {
      "epoch": 4.102857142857143,
      "grad_norm": 0.1207757027022162,
      "learning_rate": 7.043079769697833e-06,
      "loss": 0.4301,
      "step": 269
    },
    {
      "epoch": 4.118095238095238,
      "grad_norm": 0.10942499693335449,
      "learning_rate": 6.801108871482287e-06,
      "loss": 0.432,
      "step": 270
    },
    {
      "epoch": 4.133333333333334,
      "grad_norm": 0.11446276362313004,
      "learning_rate": 6.562980823254519e-06,
      "loss": 0.4313,
      "step": 271
    },
    {
      "epoch": 4.148571428571429,
      "grad_norm": 0.11355557887705738,
      "learning_rate": 6.328723188898029e-06,
      "loss": 0.4279,
      "step": 272
    },
    {
      "epoch": 4.163809523809523,
      "grad_norm": 0.1062761145201427,
      "learning_rate": 6.098363084286765e-06,
      "loss": 0.4259,
      "step": 273
    },
    {
      "epoch": 4.179047619047619,
      "grad_norm": 0.10273617911128365,
      "learning_rate": 5.8719271741464056e-06,
      "loss": 0.4225,
      "step": 274
    },
    {
      "epoch": 4.194285714285714,
      "grad_norm": 0.10086062111767927,
      "learning_rate": 5.6494416689678855e-06,
      "loss": 0.4189,
      "step": 275
    },
    {
      "epoch": 4.20952380952381,
      "grad_norm": 0.09559781846200713,
      "learning_rate": 5.430932321973425e-06,
      "loss": 0.4158,
      "step": 276
    },
    {
      "epoch": 4.2247619047619045,
      "grad_norm": 0.09465874774217785,
      "learning_rate": 5.21642442613556e-06,
      "loss": 0.4299,
      "step": 277
    },
    {
      "epoch": 4.24,
      "grad_norm": 0.09527880228230132,
      "learning_rate": 5.005942811249412e-06,
      "loss": 0.4243,
      "step": 278
    },
    {
      "epoch": 4.255238095238095,
      "grad_norm": 0.09428002792877421,
      "learning_rate": 4.799511841058575e-06,
      "loss": 0.4262,
      "step": 279
    },
    {
      "epoch": 4.270476190476191,
      "grad_norm": 0.09763553814742547,
      "learning_rate": 4.597155410434972e-06,
      "loss": 0.4247,
      "step": 280
    },
    {
      "epoch": 4.285714285714286,
      "grad_norm": 0.09287137908820702,
      "learning_rate": 4.398896942612934e-06,
      "loss": 0.4238,
      "step": 281
    },
    {
      "epoch": 4.300952380952381,
      "grad_norm": 0.09888369046987704,
      "learning_rate": 4.204759386477961e-06,
      "loss": 0.4391,
      "step": 282
    },
    {
      "epoch": 4.316190476190476,
      "grad_norm": 0.09609824332962343,
      "learning_rate": 4.01476521391027e-06,
      "loss": 0.4352,
      "step": 283
    },
    {
      "epoch": 4.331428571428571,
      "grad_norm": 0.09233308942677047,
      "learning_rate": 3.828936417183671e-06,
      "loss": 0.43,
      "step": 284
    },
    {
      "epoch": 4.346666666666667,
      "grad_norm": 0.0890288964909307,
      "learning_rate": 3.6472945064198827e-06,
      "loss": 0.427,
      "step": 285
    },
    {
      "epoch": 4.3619047619047615,
      "grad_norm": 0.08989519773450554,
      "learning_rate": 3.4698605070987077e-06,
      "loss": 0.4257,
      "step": 286
    },
    {
      "epoch": 4.377142857142857,
      "grad_norm": 0.09526649707374434,
      "learning_rate": 3.2966549576242477e-06,
      "loss": 0.4357,
      "step": 287
    },
    {
      "epoch": 4.392380952380952,
      "grad_norm": 0.09131870660723178,
      "learning_rate": 3.127697906947584e-06,
      "loss": 0.4272,
      "step": 288
    },
    {
      "epoch": 4.407619047619048,
      "grad_norm": 0.08945756240639792,
      "learning_rate": 2.963008912246008e-06,
      "loss": 0.4347,
      "step": 289
    },
    {
      "epoch": 4.422857142857143,
      "grad_norm": 0.08483664610068463,
      "learning_rate": 2.802607036659284e-06,
      "loss": 0.4294,
      "step": 290
    },
    {
      "epoch": 4.438095238095238,
      "grad_norm": 0.08486549550090346,
      "learning_rate": 2.646510847082997e-06,
      "loss": 0.4303,
      "step": 291
    },
    {
      "epoch": 4.453333333333333,
      "grad_norm": 0.084355078151828,
      "learning_rate": 2.494738412019442e-06,
      "loss": 0.423,
      "step": 292
    },
    {
      "epoch": 4.468571428571429,
      "grad_norm": 0.08518364314817348,
      "learning_rate": 2.3473072994861167e-06,
      "loss": 0.427,
      "step": 293
    },
    {
      "epoch": 4.483809523809524,
      "grad_norm": 0.08287931866258626,
      "learning_rate": 2.2042345749821913e-06,
      "loss": 0.4216,
      "step": 294
    },
    {
      "epoch": 4.499047619047619,
      "grad_norm": 0.08289838691379334,
      "learning_rate": 2.065536799513135e-06,
      "loss": 0.4288,
      "step": 295
    },
    {
      "epoch": 4.514285714285714,
      "grad_norm": 0.0850326818939402,
      "learning_rate": 1.9312300276737516e-06,
      "loss": 0.4235,
      "step": 296
    },
    {
      "epoch": 4.529523809523809,
      "grad_norm": 0.08392478938826013,
      "learning_rate": 1.8013298057897932e-06,
      "loss": 0.4293,
      "step": 297
    },
    {
      "epoch": 4.544761904761905,
      "grad_norm": 0.0892781985318678,
      "learning_rate": 1.6758511701184809e-06,
      "loss": 0.4238,
      "step": 298
    },
    {
      "epoch": 4.5600000000000005,
      "grad_norm": 0.08180406525124756,
      "learning_rate": 1.5548086451079747e-06,
      "loss": 0.4384,
      "step": 299
    },
    {
      "epoch": 4.575238095238095,
      "grad_norm": 0.08011550631601072,
      "learning_rate": 1.4382162417161748e-06,
      "loss": 0.4206,
      "step": 300
    },
    {
      "epoch": 4.59047619047619,
      "grad_norm": 0.08066581660175723,
      "learning_rate": 1.3260874557889002e-06,
      "loss": 0.4287,
      "step": 301
    },
    {
      "epoch": 4.605714285714286,
      "grad_norm": 0.0806215224586864,
      "learning_rate": 1.2184352664977105e-06,
      "loss": 0.4175,
      "step": 302
    },
    {
      "epoch": 4.620952380952381,
      "grad_norm": 0.07969848747023248,
      "learning_rate": 1.115272134837544e-06,
      "loss": 0.4312,
      "step": 303
    },
    {
      "epoch": 4.636190476190476,
      "grad_norm": 0.0797095157185979,
      "learning_rate": 1.0166100021843283e-06,
      "loss": 0.4346,
      "step": 304
    },
    {
      "epoch": 4.651428571428571,
      "grad_norm": 0.0792961159548818,
      "learning_rate": 9.224602889127321e-07,
      "loss": 0.4286,
      "step": 305
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 0.08285323866652071,
      "learning_rate": 8.328338930742296e-07,
      "loss": 0.4267,
      "step": 306
    },
    {
      "epoch": 4.681904761904762,
      "grad_norm": 0.07823875843722773,
      "learning_rate": 7.477411891356268e-07,
      "loss": 0.4276,
      "step": 307
    },
    {
      "epoch": 4.6971428571428575,
      "grad_norm": 0.07763022247982207,
      "learning_rate": 6.671920267782029e-07,
      "loss": 0.4282,
      "step": 308
    },
    {
      "epoch": 4.712380952380952,
      "grad_norm": 0.07871343041787499,
      "learning_rate": 5.911957297575743e-07,
      "loss": 0.4323,
      "step": 309
    },
    {
      "epoch": 4.727619047619047,
      "grad_norm": 0.07681136995775628,
      "learning_rate": 5.197610948244469e-07,
      "loss": 0.4256,
      "step": 310
    },
    {
      "epoch": 4.742857142857143,
      "grad_norm": 0.07665250706053243,
      "learning_rate": 4.5289639070638103e-07,
      "loss": 0.4171,
      "step": 311
    },
    {
      "epoch": 4.758095238095238,
      "grad_norm": 0.07835687105047397,
      "learning_rate": 3.906093571506597e-07,
      "loss": 0.428,
      "step": 312
    },
    {
      "epoch": 4.773333333333333,
      "grad_norm": 0.07949715755664716,
      "learning_rate": 3.329072040284009e-07,
      "loss": 0.4208,
      "step": 313
    },
    {
      "epoch": 4.788571428571428,
      "grad_norm": 0.0773084553588578,
      "learning_rate": 2.797966104999805e-07,
      "loss": 0.4332,
      "step": 314
    },
    {
      "epoch": 4.803809523809524,
      "grad_norm": 0.07795013678746698,
      "learning_rate": 2.3128372424192635e-07,
      "loss": 0.4377,
      "step": 315
    },
    {
      "epoch": 4.819047619047619,
      "grad_norm": 0.07666311315306534,
      "learning_rate": 1.8737416073529635e-07,
      "loss": 0.4272,
      "step": 316
    },
    {
      "epoch": 4.8342857142857145,
      "grad_norm": 0.07693103942282913,
      "learning_rate": 1.4807300261568735e-07,
      "loss": 0.4287,
      "step": 317
    },
    {
      "epoch": 4.849523809523809,
      "grad_norm": 0.08092920067828374,
      "learning_rate": 1.1338479908488798e-07,
      "loss": 0.4337,
      "step": 318
    },
    {
      "epoch": 4.864761904761905,
      "grad_norm": 0.07921080037622698,
      "learning_rate": 8.331356538431313e-08,
      "loss": 0.4262,
      "step": 319
    },
    {
      "epoch": 4.88,
      "grad_norm": 0.08216408996820164,
      "learning_rate": 5.7862782330215804e-08,
      "loss": 0.4317,
      "step": 320
    },
    {
      "epoch": 4.895238095238096,
      "grad_norm": 0.07591222440722502,
      "learning_rate": 3.703539591080052e-08,
      "loss": 0.4232,
      "step": 321
    },
    {
      "epoch": 4.91047619047619,
      "grad_norm": 0.08045135907636206,
      "learning_rate": 2.0833816945184936e-08,
      "loss": 0.4297,
      "step": 322
    },
    {
      "epoch": 4.925714285714285,
      "grad_norm": 0.07684321088822434,
      "learning_rate": 9.25992080436533e-09,
      "loss": 0.4276,
      "step": 323
    },
    {
      "epoch": 4.940952380952381,
      "grad_norm": 0.07680665420588856,
      "learning_rate": 2.315047194136888e-09,
      "loss": 0.4271,
      "step": 324
    },
    {
      "epoch": 4.956190476190476,
      "grad_norm": 0.07663586105720761,
      "learning_rate": 0.0,
      "loss": 0.4248,
      "step": 325
    },
    {
      "epoch": 4.956190476190476,
      "step": 325,
      "total_flos": 8.641809758367515e+18,
      "train_loss": 0.5190348960803105,
      "train_runtime": 75842.3225,
      "train_samples_per_second": 2.214,
      "train_steps_per_second": 0.004
    }
  ],
  "logging_steps": 1,
  "max_steps": 325,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.641809758367515e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}