{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.94557270511779,
  "eval_steps": 500,
  "global_step": 380,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012997562956945572,
      "grad_norm": 6.0997486989802905,
      "learning_rate": 2.105263157894737e-06,
      "loss": 1.0219,
      "step": 1
    },
    {
      "epoch": 0.025995125913891144,
      "grad_norm": 6.068913238622443,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.0148,
      "step": 2
    },
    {
      "epoch": 0.03899268887083672,
      "grad_norm": 5.6044836848817114,
      "learning_rate": 6.31578947368421e-06,
      "loss": 0.9978,
      "step": 3
    },
    {
      "epoch": 0.05199025182778229,
      "grad_norm": 4.031804147696358,
      "learning_rate": 8.421052631578948e-06,
      "loss": 0.9693,
      "step": 4
    },
    {
      "epoch": 0.06498781478472786,
      "grad_norm": 2.364074737584526,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.932,
      "step": 5
    },
    {
      "epoch": 0.07798537774167344,
      "grad_norm": 4.799631400433253,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.9538,
      "step": 6
    },
    {
      "epoch": 0.09098294069861901,
      "grad_norm": 4.8356162951563455,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 0.9456,
      "step": 7
    },
    {
      "epoch": 0.10398050365556458,
      "grad_norm": 5.701590950772652,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.9347,
      "step": 8
    },
    {
      "epoch": 0.11697806661251016,
      "grad_norm": 4.122702114221551,
      "learning_rate": 1.894736842105263e-05,
      "loss": 0.9102,
      "step": 9
    },
    {
      "epoch": 0.12997562956945571,
      "grad_norm": 2.755137952195614,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.8668,
      "step": 10
    },
    {
      "epoch": 0.1429731925264013,
      "grad_norm": 1.816857217622971,
      "learning_rate": 2.3157894736842107e-05,
      "loss": 0.829,
      "step": 11
    },
    {
      "epoch": 0.15597075548334688,
      "grad_norm": 1.628266006559788,
      "learning_rate": 2.526315789473684e-05,
      "loss": 0.8096,
      "step": 12
    },
    {
      "epoch": 0.16896831844029245,
      "grad_norm": 1.0893348868429666,
      "learning_rate": 2.7368421052631583e-05,
      "loss": 0.7878,
      "step": 13
    },
    {
      "epoch": 0.18196588139723802,
      "grad_norm": 1.3192042901208236,
      "learning_rate": 2.9473684210526317e-05,
      "loss": 0.7802,
      "step": 14
    },
    {
      "epoch": 0.19496344435418358,
      "grad_norm": 1.4149602566297217,
      "learning_rate": 3.157894736842106e-05,
      "loss": 0.7771,
      "step": 15
    },
    {
      "epoch": 0.20796100731112915,
      "grad_norm": 1.2978347644512884,
      "learning_rate": 3.368421052631579e-05,
      "loss": 0.7706,
      "step": 16
    },
    {
      "epoch": 0.22095857026807472,
      "grad_norm": 1.2598328438699724,
      "learning_rate": 3.578947368421053e-05,
      "loss": 0.7457,
      "step": 17
    },
    {
      "epoch": 0.23395613322502032,
      "grad_norm": 1.2396850991167663,
      "learning_rate": 3.789473684210526e-05,
      "loss": 0.7417,
      "step": 18
    },
    {
      "epoch": 0.2469536961819659,
      "grad_norm": 0.9708304848685996,
      "learning_rate": 4e-05,
      "loss": 0.7314,
      "step": 19
    },
    {
      "epoch": 0.25995125913891143,
      "grad_norm": 1.7248674306611338,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.7411,
      "step": 20
    },
    {
      "epoch": 0.272948822095857,
      "grad_norm": 1.3316644956564598,
      "learning_rate": 4.421052631578948e-05,
      "loss": 0.7289,
      "step": 21
    },
    {
      "epoch": 0.2859463850528026,
      "grad_norm": 1.1822781700583618,
      "learning_rate": 4.6315789473684214e-05,
      "loss": 0.7153,
      "step": 22
    },
    {
      "epoch": 0.29894394800974816,
      "grad_norm": 1.8096647661880758,
      "learning_rate": 4.842105263157895e-05,
      "loss": 0.7235,
      "step": 23
    },
    {
      "epoch": 0.31194151096669376,
      "grad_norm": 1.1134508765191324,
      "learning_rate": 5.052631578947368e-05,
      "loss": 0.7169,
      "step": 24
    },
    {
      "epoch": 0.3249390739236393,
      "grad_norm": 1.8769448768396773,
      "learning_rate": 5.263157894736843e-05,
      "loss": 0.7193,
      "step": 25
    },
    {
      "epoch": 0.3379366368805849,
      "grad_norm": 1.5426211438216848,
      "learning_rate": 5.4736842105263165e-05,
      "loss": 0.6987,
      "step": 26
    },
    {
      "epoch": 0.35093419983753044,
      "grad_norm": 1.6191545464577533,
      "learning_rate": 5.68421052631579e-05,
      "loss": 0.7044,
      "step": 27
    },
    {
      "epoch": 0.36393176279447603,
      "grad_norm": 1.8506957580033367,
      "learning_rate": 5.8947368421052634e-05,
      "loss": 0.6979,
      "step": 28
    },
    {
      "epoch": 0.37692932575142163,
      "grad_norm": 1.0979665335426236,
      "learning_rate": 6.105263157894738e-05,
      "loss": 0.6893,
      "step": 29
    },
    {
      "epoch": 0.38992688870836717,
      "grad_norm": 1.502570696227111,
      "learning_rate": 6.315789473684212e-05,
      "loss": 0.6827,
      "step": 30
    },
    {
      "epoch": 0.40292445166531277,
      "grad_norm": 0.9317441975712487,
      "learning_rate": 6.526315789473685e-05,
      "loss": 0.6835,
      "step": 31
    },
    {
      "epoch": 0.4159220146222583,
      "grad_norm": 1.9371596519249814,
      "learning_rate": 6.736842105263159e-05,
      "loss": 0.6888,
      "step": 32
    },
    {
      "epoch": 0.4289195775792039,
      "grad_norm": 1.077638882755355,
      "learning_rate": 6.947368421052632e-05,
      "loss": 0.6861,
      "step": 33
    },
    {
      "epoch": 0.44191714053614944,
      "grad_norm": 1.6213279548147228,
      "learning_rate": 7.157894736842105e-05,
      "loss": 0.6832,
      "step": 34
    },
    {
      "epoch": 0.45491470349309504,
      "grad_norm": 1.1576903226097717,
      "learning_rate": 7.368421052631579e-05,
      "loss": 0.6859,
      "step": 35
    },
    {
      "epoch": 0.46791226645004064,
      "grad_norm": 1.5908649773588832,
      "learning_rate": 7.578947368421052e-05,
      "loss": 0.6702,
      "step": 36
    },
    {
      "epoch": 0.4809098294069862,
      "grad_norm": 1.3222880266963757,
      "learning_rate": 7.789473684210527e-05,
      "loss": 0.671,
      "step": 37
    },
    {
      "epoch": 0.4939073923639318,
      "grad_norm": 1.420249377015549,
      "learning_rate": 8e-05,
      "loss": 0.6665,
      "step": 38
    },
    {
      "epoch": 0.5069049553208773,
      "grad_norm": 1.768938045647054,
      "learning_rate": 7.99983123807325e-05,
      "loss": 0.6727,
      "step": 39
    },
    {
      "epoch": 0.5199025182778229,
      "grad_norm": 1.6982948658344774,
      "learning_rate": 7.999324966533291e-05,
      "loss": 0.669,
      "step": 40
    },
    {
      "epoch": 0.5329000812347685,
      "grad_norm": 0.8797512479401394,
      "learning_rate": 7.998481228099806e-05,
      "loss": 0.661,
      "step": 41
    },
    {
      "epoch": 0.545897644191714,
      "grad_norm": 1.5855427665518893,
      "learning_rate": 7.997300093968255e-05,
      "loss": 0.6726,
      "step": 42
    },
    {
      "epoch": 0.5588952071486596,
      "grad_norm": 1.3274746178364085,
      "learning_rate": 7.995781663803876e-05,
      "loss": 0.6677,
      "step": 43
    },
    {
      "epoch": 0.5718927701056052,
      "grad_norm": 1.5337377099856493,
      "learning_rate": 7.993926065733265e-05,
      "loss": 0.668,
      "step": 44
    },
    {
      "epoch": 0.5848903330625508,
      "grad_norm": 1.4378683874970328,
      "learning_rate": 7.991733456333579e-05,
      "loss": 0.6664,
      "step": 45
    },
    {
      "epoch": 0.5978878960194963,
      "grad_norm": 1.0488679417844093,
      "learning_rate": 7.98920402061931e-05,
      "loss": 0.6491,
      "step": 46
    },
    {
      "epoch": 0.6108854589764419,
      "grad_norm": 1.724437071603027,
      "learning_rate": 7.98633797202668e-05,
      "loss": 0.6628,
      "step": 47
    },
    {
      "epoch": 0.6238830219333875,
      "grad_norm": 1.258163031927199,
      "learning_rate": 7.98313555239563e-05,
      "loss": 0.6542,
      "step": 48
    },
    {
      "epoch": 0.6368805848903331,
      "grad_norm": 1.704192273592525,
      "learning_rate": 7.979597031949415e-05,
      "loss": 0.6624,
      "step": 49
    },
    {
      "epoch": 0.6498781478472786,
      "grad_norm": 1.1814387575753857,
      "learning_rate": 7.975722709271799e-05,
      "loss": 0.646,
      "step": 50
    },
    {
      "epoch": 0.6628757108042242,
      "grad_norm": 1.7786158488838122,
      "learning_rate": 7.97151291128186e-05,
      "loss": 0.648,
      "step": 51
    },
    {
      "epoch": 0.6758732737611698,
      "grad_norm": 1.571640473826489,
      "learning_rate": 7.96696799320641e-05,
      "loss": 0.6423,
      "step": 52
    },
    {
      "epoch": 0.6888708367181153,
      "grad_norm": 0.9402903203259698,
      "learning_rate": 7.962088338550013e-05,
      "loss": 0.6414,
      "step": 53
    },
    {
      "epoch": 0.7018683996750609,
      "grad_norm": 1.3502958792513085,
      "learning_rate": 7.956874359062632e-05,
      "loss": 0.6429,
      "step": 54
    },
    {
      "epoch": 0.7148659626320065,
      "grad_norm": 0.6507620845472206,
      "learning_rate": 7.951326494704878e-05,
      "loss": 0.6321,
      "step": 55
    },
    {
      "epoch": 0.7278635255889521,
      "grad_norm": 1.1688029322337978,
      "learning_rate": 7.94544521361089e-05,
      "loss": 0.638,
      "step": 56
    },
    {
      "epoch": 0.7408610885458976,
      "grad_norm": 1.59775329650898,
      "learning_rate": 7.939231012048833e-05,
      "loss": 0.6394,
      "step": 57
    },
    {
      "epoch": 0.7538586515028433,
      "grad_norm": 0.8720639512920967,
      "learning_rate": 7.932684414379021e-05,
      "loss": 0.6368,
      "step": 58
    },
    {
      "epoch": 0.7668562144597888,
      "grad_norm": 0.867015921744947,
      "learning_rate": 7.925805973009672e-05,
      "loss": 0.6325,
      "step": 59
    },
    {
      "epoch": 0.7798537774167343,
      "grad_norm": 1.318563507969403,
      "learning_rate": 7.918596268350296e-05,
      "loss": 0.6461,
      "step": 60
    },
    {
      "epoch": 0.7928513403736799,
      "grad_norm": 1.3836000857634128,
      "learning_rate": 7.911055908762718e-05,
      "loss": 0.6439,
      "step": 61
    },
    {
      "epoch": 0.8058489033306255,
      "grad_norm": 0.7899305091503791,
      "learning_rate": 7.903185530509743e-05,
      "loss": 0.6278,
      "step": 62
    },
    {
      "epoch": 0.8188464662875711,
      "grad_norm": 1.4077197027429933,
      "learning_rate": 7.894985797701472e-05,
      "loss": 0.6383,
      "step": 63
    },
    {
      "epoch": 0.8318440292445166,
      "grad_norm": 0.7458184956018674,
      "learning_rate": 7.886457402239256e-05,
      "loss": 0.6239,
      "step": 64
    },
    {
      "epoch": 0.8448415922014623,
      "grad_norm": 1.167599790945464,
      "learning_rate": 7.877601063757323e-05,
      "loss": 0.6306,
      "step": 65
    },
    {
      "epoch": 0.8578391551584078,
      "grad_norm": 1.2996321903912411,
      "learning_rate": 7.868417529562043e-05,
      "loss": 0.6194,
      "step": 66
    },
    {
      "epoch": 0.8708367181153533,
      "grad_norm": 0.9895793316487125,
      "learning_rate": 7.858907574568882e-05,
      "loss": 0.6267,
      "step": 67
    },
    {
      "epoch": 0.8838342810722989,
      "grad_norm": 1.5855243461352568,
      "learning_rate": 7.849072001237001e-05,
      "loss": 0.622,
      "step": 68
    },
    {
      "epoch": 0.8968318440292445,
      "grad_norm": 0.7421138376516303,
      "learning_rate": 7.838911639501557e-05,
      "loss": 0.6175,
      "step": 69
    },
    {
      "epoch": 0.9098294069861901,
      "grad_norm": 0.7858592220019243,
      "learning_rate": 7.828427346703657e-05,
      "loss": 0.615,
      "step": 70
    },
    {
      "epoch": 0.9228269699431356,
      "grad_norm": 1.0775513618300434,
      "learning_rate": 7.81762000751803e-05,
      "loss": 0.6196,
      "step": 71
    },
    {
      "epoch": 0.9358245329000813,
      "grad_norm": 1.0373464409020101,
      "learning_rate": 7.806490533878368e-05,
      "loss": 0.6201,
      "step": 72
    },
    {
      "epoch": 0.9488220958570268,
      "grad_norm": 1.1465250070186461,
      "learning_rate": 7.795039864900378e-05,
      "loss": 0.6152,
      "step": 73
    },
    {
      "epoch": 0.9618196588139724,
      "grad_norm": 1.4979962912932638,
      "learning_rate": 7.783268966802539e-05,
      "loss": 0.6282,
      "step": 74
    },
    {
      "epoch": 0.974817221770918,
      "grad_norm": 0.5826803556978378,
      "learning_rate": 7.771178832824573e-05,
      "loss": 0.6124,
      "step": 75
    },
    {
      "epoch": 0.9878147847278635,
      "grad_norm": 1.2770979259997852,
      "learning_rate": 7.758770483143634e-05,
      "loss": 0.614,
      "step": 76
    },
    {
      "epoch": 1.0024370430544274,
      "grad_norm": 0.9789998519223624,
      "learning_rate": 7.74604496478822e-05,
      "loss": 0.6145,
      "step": 77
    },
    {
      "epoch": 1.0154346060113728,
      "grad_norm": 1.0066060430115773,
      "learning_rate": 7.733003351549829e-05,
      "loss": 0.5866,
      "step": 78
    },
    {
      "epoch": 1.0284321689683185,
      "grad_norm": 1.2036951705549706,
      "learning_rate": 7.719646743892352e-05,
      "loss": 0.591,
      "step": 79
    },
    {
      "epoch": 1.0414297319252641,
      "grad_norm": 1.0649768587352126,
      "learning_rate": 7.705976268859207e-05,
      "loss": 0.5936,
      "step": 80
    },
    {
      "epoch": 1.0544272948822095,
      "grad_norm": 0.7832240050261705,
      "learning_rate": 7.691993079978252e-05,
      "loss": 0.5838,
      "step": 81
    },
    {
      "epoch": 1.0674248578391552,
      "grad_norm": 0.9810569125748073,
      "learning_rate": 7.677698357164431e-05,
      "loss": 0.586,
      "step": 82
    },
    {
      "epoch": 1.0804224207961006,
      "grad_norm": 1.4858713089160471,
      "learning_rate": 7.663093306620231e-05,
      "loss": 0.6045,
      "step": 83
    },
    {
      "epoch": 1.0934199837530463,
      "grad_norm": 0.6254621025890486,
      "learning_rate": 7.648179160733883e-05,
      "loss": 0.5814,
      "step": 84
    },
    {
      "epoch": 1.106417546709992,
      "grad_norm": 1.2613394865623482,
      "learning_rate": 7.632957177975387e-05,
      "loss": 0.5946,
      "step": 85
    },
    {
      "epoch": 1.1194151096669374,
      "grad_norm": 0.8850396265868055,
      "learning_rate": 7.61742864279031e-05,
      "loss": 0.5923,
      "step": 86
    },
    {
      "epoch": 1.132412672623883,
      "grad_norm": 0.8583010134273165,
      "learning_rate": 7.601594865491414e-05,
      "loss": 0.5803,
      "step": 87
    },
    {
      "epoch": 1.1454102355808287,
      "grad_norm": 0.9973680934543633,
      "learning_rate": 7.585457182148081e-05,
      "loss": 0.5838,
      "step": 88
    },
    {
      "epoch": 1.158407798537774,
      "grad_norm": 0.5259645145392762,
      "learning_rate": 7.569016954473577e-05,
      "loss": 0.5811,
      "step": 89
    },
    {
      "epoch": 1.1714053614947197,
      "grad_norm": 0.8534158462427184,
      "learning_rate": 7.552275569710152e-05,
      "loss": 0.5875,
      "step": 90
    },
    {
      "epoch": 1.1844029244516654,
      "grad_norm": 0.8938394352981215,
      "learning_rate": 7.535234440511979e-05,
      "loss": 0.5811,
      "step": 91
    },
    {
      "epoch": 1.1974004874086108,
      "grad_norm": 0.6878422249419367,
      "learning_rate": 7.517895004825956e-05,
      "loss": 0.5808,
      "step": 92
    },
    {
      "epoch": 1.2103980503655565,
      "grad_norm": 0.6842348524486839,
      "learning_rate": 7.500258725770375e-05,
      "loss": 0.5768,
      "step": 93
    },
    {
      "epoch": 1.2233956133225021,
      "grad_norm": 0.44927337592920435,
      "learning_rate": 7.48232709151145e-05,
      "loss": 0.5705,
      "step": 94
    },
    {
      "epoch": 1.2363931762794476,
      "grad_norm": 0.579052975299202,
      "learning_rate": 7.464101615137756e-05,
      "loss": 0.5818,
      "step": 95
    },
    {
      "epoch": 1.2493907392363932,
      "grad_norm": 0.475371873748839,
      "learning_rate": 7.445583834532546e-05,
      "loss": 0.5642,
      "step": 96
    },
    {
      "epoch": 1.2623883021933389,
      "grad_norm": 0.6520756246411096,
      "learning_rate": 7.426775312243986e-05,
      "loss": 0.5779,
      "step": 97
    },
    {
      "epoch": 1.2753858651502843,
      "grad_norm": 0.6077714937003503,
      "learning_rate": 7.407677635353308e-05,
      "loss": 0.5743,
      "step": 98
    },
    {
      "epoch": 1.28838342810723,
      "grad_norm": 0.37305083323017324,
      "learning_rate": 7.388292415340888e-05,
      "loss": 0.568,
      "step": 99
    },
    {
      "epoch": 1.3013809910641756,
      "grad_norm": 0.5642684450490978,
      "learning_rate": 7.368621287950264e-05,
      "loss": 0.5657,
      "step": 100
    },
    {
      "epoch": 1.314378554021121,
      "grad_norm": 0.606405644472531,
      "learning_rate": 7.348665913050115e-05,
      "loss": 0.5608,
      "step": 101
    },
    {
      "epoch": 1.3273761169780667,
      "grad_norm": 0.5507521692599793,
      "learning_rate": 7.328427974494201e-05,
      "loss": 0.5676,
      "step": 102
    },
    {
      "epoch": 1.340373679935012,
      "grad_norm": 0.46420174071672377,
      "learning_rate": 7.307909179979274e-05,
      "loss": 0.5649,
      "step": 103
    },
    {
      "epoch": 1.3533712428919578,
      "grad_norm": 0.40419774560998856,
      "learning_rate": 7.28711126090098e-05,
      "loss": 0.5662,
      "step": 104
    },
    {
      "epoch": 1.3663688058489034,
      "grad_norm": 0.3452636341624681,
      "learning_rate": 7.266035972207773e-05,
      "loss": 0.5576,
      "step": 105
    },
    {
      "epoch": 1.3793663688058488,
      "grad_norm": 0.4425477270397441,
      "learning_rate": 7.24468509225281e-05,
      "loss": 0.5696,
      "step": 106
    },
    {
      "epoch": 1.3923639317627945,
      "grad_norm": 0.38812714960466954,
      "learning_rate": 7.223060422643914e-05,
      "loss": 0.5681,
      "step": 107
    },
    {
      "epoch": 1.40536149471974,
      "grad_norm": 0.4605494846925124,
      "learning_rate": 7.201163788091536e-05,
      "loss": 0.568,
      "step": 108
    },
    {
      "epoch": 1.4183590576766856,
      "grad_norm": 0.5362082001206658,
      "learning_rate": 7.178997036254799e-05,
      "loss": 0.5681,
      "step": 109
    },
    {
      "epoch": 1.4313566206336312,
      "grad_norm": 0.8253712799595396,
      "learning_rate": 7.156562037585576e-05,
      "loss": 0.5681,
      "step": 110
    },
    {
      "epoch": 1.4443541835905767,
      "grad_norm": 1.0494191842707108,
      "learning_rate": 7.133860685170665e-05,
      "loss": 0.5766,
      "step": 111
    },
    {
      "epoch": 1.4573517465475223,
      "grad_norm": 0.9402405531131097,
      "learning_rate": 7.110894894572056e-05,
      "loss": 0.5654,
      "step": 112
    },
    {
      "epoch": 1.470349309504468,
      "grad_norm": 0.6225802745509316,
      "learning_rate": 7.087666603665284e-05,
      "loss": 0.5613,
      "step": 113
    },
    {
      "epoch": 1.4833468724614134,
      "grad_norm": 0.5144406584023249,
      "learning_rate": 7.064177772475912e-05,
      "loss": 0.5695,
      "step": 114
    },
    {
      "epoch": 1.496344435418359,
      "grad_norm": 0.5600022005241243,
      "learning_rate": 7.040430383014146e-05,
      "loss": 0.5671,
      "step": 115
    },
    {
      "epoch": 1.5093419983753047,
      "grad_norm": 0.5708170768902237,
      "learning_rate": 7.016426439107586e-05,
      "loss": 0.565,
      "step": 116
    },
    {
      "epoch": 1.5223395613322501,
      "grad_norm": 0.4252834967763784,
      "learning_rate": 6.992167966232143e-05,
      "loss": 0.5571,
      "step": 117
    },
    {
      "epoch": 1.5353371242891958,
      "grad_norm": 0.4633342885069769,
      "learning_rate": 6.967657011341126e-05,
      "loss": 0.5764,
      "step": 118
    },
    {
      "epoch": 1.5483346872461414,
      "grad_norm": 0.5070623003686142,
      "learning_rate": 6.942895642692527e-05,
      "loss": 0.5622,
      "step": 119
    },
    {
      "epoch": 1.5613322502030869,
      "grad_norm": 0.4491248883432154,
      "learning_rate": 6.917885949674483e-05,
      "loss": 0.5668,
      "step": 120
    },
    {
      "epoch": 1.5743298131600325,
      "grad_norm": 0.5508769887810068,
      "learning_rate": 6.892630042628988e-05,
      "loss": 0.5579,
      "step": 121
    },
    {
      "epoch": 1.5873273761169782,
      "grad_norm": 0.7170323919312661,
      "learning_rate": 6.867130052673806e-05,
      "loss": 0.5651,
      "step": 122
    },
    {
      "epoch": 1.6003249390739236,
      "grad_norm": 0.7500286802767102,
      "learning_rate": 6.841388131522656e-05,
      "loss": 0.5644,
      "step": 123
    },
    {
      "epoch": 1.6133225020308692,
      "grad_norm": 0.8689891585480664,
      "learning_rate": 6.815406451303647e-05,
      "loss": 0.5652,
      "step": 124
    },
    {
      "epoch": 1.626320064987815,
      "grad_norm": 0.9174546075219608,
      "learning_rate": 6.789187204375981e-05,
      "loss": 0.5677,
      "step": 125
    },
    {
      "epoch": 1.6393176279447603,
      "grad_norm": 0.7089434351473182,
      "learning_rate": 6.762732603144978e-05,
      "loss": 0.563,
      "step": 126
    },
    {
      "epoch": 1.652315190901706,
      "grad_norm": 0.42385408119599144,
      "learning_rate": 6.736044879875373e-05,
      "loss": 0.5554,
      "step": 127
    },
    {
      "epoch": 1.6653127538586516,
      "grad_norm": 0.3858940619147007,
      "learning_rate": 6.709126286502965e-05,
      "loss": 0.564,
      "step": 128
    },
    {
      "epoch": 1.678310316815597,
      "grad_norm": 0.47585505760511126,
      "learning_rate": 6.681979094444596e-05,
      "loss": 0.559,
      "step": 129
    },
    {
      "epoch": 1.6913078797725425,
      "grad_norm": 0.48267758584823545,
      "learning_rate": 6.654605594406486e-05,
      "loss": 0.5586,
      "step": 130
    },
    {
      "epoch": 1.7043054427294884,
      "grad_norm": 0.39278797753133343,
      "learning_rate": 6.627008096190938e-05,
      "loss": 0.5581,
      "step": 131
    },
    {
      "epoch": 1.7173030056864338,
      "grad_norm": 0.30339722409713105,
      "learning_rate": 6.59918892850144e-05,
      "loss": 0.5601,
      "step": 132
    },
    {
      "epoch": 1.7303005686433792,
      "grad_norm": 0.42189749304955176,
      "learning_rate": 6.571150438746157e-05,
      "loss": 0.5584,
      "step": 133
    },
    {
      "epoch": 1.743298131600325,
      "grad_norm": 0.4044466629234294,
      "learning_rate": 6.542894992839873e-05,
      "loss": 0.5524,
      "step": 134
    },
    {
      "epoch": 1.7562956945572705,
      "grad_norm": 0.356328501602452,
      "learning_rate": 6.514424975004329e-05,
      "loss": 0.5551,
      "step": 135
    },
    {
      "epoch": 1.769293257514216,
      "grad_norm": 0.3295018876147217,
      "learning_rate": 6.48574278756706e-05,
      "loss": 0.5554,
      "step": 136
    },
    {
      "epoch": 1.7822908204711616,
      "grad_norm": 0.339714023957606,
      "learning_rate": 6.456850850758673e-05,
      "loss": 0.5503,
      "step": 137
    },
    {
      "epoch": 1.7952883834281073,
      "grad_norm": 0.33807908223453265,
      "learning_rate": 6.427751602508628e-05,
      "loss": 0.5508,
      "step": 138
    },
    {
      "epoch": 1.8082859463850527,
      "grad_norm": 0.3018858970919186,
      "learning_rate": 6.398447498239527e-05,
      "loss": 0.5588,
      "step": 139
    },
    {
      "epoch": 1.8212835093419983,
      "grad_norm": 0.27735277755052196,
      "learning_rate": 6.368941010659921e-05,
      "loss": 0.5572,
      "step": 140
    },
    {
      "epoch": 1.834281072298944,
      "grad_norm": 0.27336758705873004,
      "learning_rate": 6.339234629555655e-05,
      "loss": 0.5552,
      "step": 141
    },
    {
      "epoch": 1.8472786352558894,
      "grad_norm": 0.3639828168642031,
      "learning_rate": 6.309330861579786e-05,
      "loss": 0.5585,
      "step": 142
    },
    {
      "epoch": 1.860276198212835,
      "grad_norm": 0.320921701299306,
      "learning_rate": 6.279232230041065e-05,
      "loss": 0.5529,
      "step": 143
    },
    {
      "epoch": 1.8732737611697807,
      "grad_norm": 0.2564010720238856,
      "learning_rate": 6.248941274691017e-05,
      "loss": 0.5505,
      "step": 144
    },
    {
      "epoch": 1.8862713241267262,
      "grad_norm": 0.29505459687573216,
      "learning_rate": 6.218460551509636e-05,
      "loss": 0.5466,
      "step": 145
    },
    {
      "epoch": 1.8992688870836718,
      "grad_norm": 0.2712430754308456,
      "learning_rate": 6.18779263248971e-05,
      "loss": 0.555,
      "step": 146
    },
    {
      "epoch": 1.9122664500406175,
      "grad_norm": 0.2662237528109047,
      "learning_rate": 6.156940105419785e-05,
      "loss": 0.5491,
      "step": 147
    },
    {
      "epoch": 1.925264012997563,
      "grad_norm": 0.2691614842068283,
      "learning_rate": 6.125905573665824e-05,
      "loss": 0.5511,
      "step": 148
    },
    {
      "epoch": 1.9382615759545085,
      "grad_norm": 0.3015518307355036,
      "learning_rate": 6.094691655951512e-05,
      "loss": 0.5531,
      "step": 149
    },
    {
      "epoch": 1.9512591389114542,
      "grad_norm": 0.3658821226584232,
      "learning_rate": 6.063300986137297e-05,
      "loss": 0.5519,
      "step": 150
    },
    {
      "epoch": 1.9642567018683996,
      "grad_norm": 0.4663797478441434,
      "learning_rate": 6.0317362129981375e-05,
      "loss": 0.5512,
      "step": 151
    },
    {
      "epoch": 1.9772542648253453,
      "grad_norm": 0.4971844054397088,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.5558,
      "step": 152
    },
    {
      "epoch": 1.990251827782291,
      "grad_norm": 0.47361679355170094,
      "learning_rate": 5.968095025075114e-05,
      "loss": 0.5582,
      "step": 153
    },
    {
      "epoch": 2.0048740861088548,
      "grad_norm": 0.44280822524000313,
      "learning_rate": 5.936023980395997e-05,
      "loss": 0.5421,
      "step": 154
    },
    {
      "epoch": 2.0178716490658,
      "grad_norm": 0.3790773260403415,
      "learning_rate": 5.903789572148295e-05,
      "loss": 0.5126,
      "step": 155
    },
    {
      "epoch": 2.0308692120227456,
      "grad_norm": 0.364460912451908,
      "learning_rate": 5.871394520302432e-05,
      "loss": 0.5046,
      "step": 156
    },
    {
      "epoch": 2.0438667749796915,
      "grad_norm": 0.5020577735626967,
      "learning_rate": 5.838841558384091e-05,
      "loss": 0.5045,
      "step": 157
    },
    {
      "epoch": 2.056864337936637,
      "grad_norm": 0.706168520770138,
      "learning_rate": 5.806133433243558e-05,
      "loss": 0.5145,
      "step": 158
    },
    {
      "epoch": 2.0698619008935824,
      "grad_norm": 0.8010155128590188,
      "learning_rate": 5.7732729048239444e-05,
      "loss": 0.5092,
      "step": 159
    },
    {
      "epoch": 2.0828594638505282,
      "grad_norm": 0.7577390677701559,
      "learning_rate": 5.740262745928293e-05,
      "loss": 0.5103,
      "step": 160
    },
    {
      "epoch": 2.0958570268074737,
      "grad_norm": 0.7209012452956537,
      "learning_rate": 5.707105741985615e-05,
      "loss": 0.5104,
      "step": 161
    },
    {
      "epoch": 2.108854589764419,
      "grad_norm": 0.6542953424011526,
      "learning_rate": 5.673804690815845e-05,
      "loss": 0.5117,
      "step": 162
    },
    {
      "epoch": 2.121852152721365,
      "grad_norm": 0.5079580975512822,
      "learning_rate": 5.6403624023937614e-05,
      "loss": 0.5039,
      "step": 163
    },
    {
      "epoch": 2.1348497156783104,
      "grad_norm": 0.5950400993667935,
      "learning_rate": 5.606781698611879e-05,
      "loss": 0.5043,
      "step": 164
    },
    {
      "epoch": 2.147847278635256,
      "grad_norm": 0.6433692597883678,
      "learning_rate": 5.573065413042333e-05,
      "loss": 0.5069,
      "step": 165
    },
    {
      "epoch": 2.1608448415922012,
      "grad_norm": 0.32946933531907296,
      "learning_rate": 5.5392163906977835e-05,
      "loss": 0.5072,
      "step": 166
    },
    {
      "epoch": 2.173842404549147,
      "grad_norm": 0.4830710940230807,
      "learning_rate": 5.505237487791343e-05,
      "loss": 0.5054,
      "step": 167
    },
    {
      "epoch": 2.1868399675060926,
      "grad_norm": 0.5572075901805054,
      "learning_rate": 5.471131571495574e-05,
      "loss": 0.5031,
      "step": 168
    },
    {
      "epoch": 2.199837530463038,
      "grad_norm": 0.41100847958354275,
      "learning_rate": 5.4369015197005506e-05,
      "loss": 0.5115,
      "step": 169
    },
    {
      "epoch": 2.212835093419984,
      "grad_norm": 0.397477863509405,
      "learning_rate": 5.4025502207710184e-05,
      "loss": 0.5073,
      "step": 170
    },
    {
      "epoch": 2.2258326563769293,
      "grad_norm": 0.4132861651257392,
      "learning_rate": 5.368080573302676e-05,
      "loss": 0.5019,
      "step": 171
    },
    {
      "epoch": 2.2388302193338747,
      "grad_norm": 0.36796560656231636,
      "learning_rate": 5.333495485877583e-05,
      "loss": 0.5025,
      "step": 172
    },
    {
      "epoch": 2.2518277822908206,
      "grad_norm": 0.4503739632990606,
      "learning_rate": 5.298797876818735e-05,
      "loss": 0.5025,
      "step": 173
    },
    {
      "epoch": 2.264825345247766,
      "grad_norm": 0.33000420931471053,
      "learning_rate": 5.263990673943811e-05,
      "loss": 0.5032,
      "step": 174
    },
    {
      "epoch": 2.2778229082047114,
      "grad_norm": 0.39020307570919605,
      "learning_rate": 5.229076814318122e-05,
      "loss": 0.5033,
      "step": 175
    },
    {
      "epoch": 2.2908204711616573,
      "grad_norm": 0.3739500669613302,
      "learning_rate": 5.194059244006779e-05,
      "loss": 0.4963,
      "step": 176
    },
    {
      "epoch": 2.3038180341186028,
      "grad_norm": 0.28417009432734236,
      "learning_rate": 5.158940917826099e-05,
      "loss": 0.5049,
      "step": 177
    },
    {
      "epoch": 2.316815597075548,
      "grad_norm": 0.536474729491158,
      "learning_rate": 5.123724799094279e-05,
      "loss": 0.4985,
      "step": 178
    },
    {
      "epoch": 2.329813160032494,
      "grad_norm": 0.4904508294031785,
      "learning_rate": 5.088413859381341e-05,
      "loss": 0.4967,
      "step": 179
    },
    {
      "epoch": 2.3428107229894395,
      "grad_norm": 0.2579052560741679,
      "learning_rate": 5.053011078258397e-05,
      "loss": 0.4997,
      "step": 180
    },
    {
      "epoch": 2.355808285946385,
      "grad_norm": 0.3211967991576727,
      "learning_rate": 5.017519443046226e-05,
      "loss": 0.5072,
      "step": 181
    },
    {
      "epoch": 2.368805848903331,
      "grad_norm": 0.366363860420648,
      "learning_rate": 4.981941948563197e-05,
      "loss": 0.4954,
      "step": 182
    },
    {
      "epoch": 2.381803411860276,
      "grad_norm": 0.29020724122191843,
      "learning_rate": 4.94628159687257e-05,
      "loss": 0.4968,
      "step": 183
    },
    {
      "epoch": 2.3948009748172217,
      "grad_norm": 0.21816500437499844,
      "learning_rate": 4.9105413970291747e-05,
      "loss": 0.5013,
      "step": 184
    },
    {
      "epoch": 2.4077985377741675,
      "grad_norm": 0.20849558134047014,
      "learning_rate": 4.874724364825504e-05,
      "loss": 0.506,
      "step": 185
    },
    {
      "epoch": 2.420796100731113,
      "grad_norm": 0.31047219756148736,
      "learning_rate": 4.8388335225372416e-05,
      "loss": 0.5052,
      "step": 186
    },
    {
      "epoch": 2.4337936636880584,
      "grad_norm": 0.3050621953402528,
      "learning_rate": 4.802871898668237e-05,
      "loss": 0.5023,
      "step": 187
    },
    {
      "epoch": 2.4467912266450043,
      "grad_norm": 0.20661927757896031,
      "learning_rate": 4.7668425276949546e-05,
      "loss": 0.5057,
      "step": 188
    },
    {
      "epoch": 2.4597887896019497,
      "grad_norm": 0.26788233927573063,
      "learning_rate": 4.730748449810429e-05,
      "loss": 0.4958,
      "step": 189
    },
    {
      "epoch": 2.472786352558895,
      "grad_norm": 0.2906205342326177,
      "learning_rate": 4.694592710667723e-05,
      "loss": 0.4963,
      "step": 190
    },
    {
      "epoch": 2.4857839155158405,
      "grad_norm": 0.2580857847569525,
      "learning_rate": 4.658378361122936e-05,
      "loss": 0.5002,
      "step": 191
    },
    {
      "epoch": 2.4987814784727864,
      "grad_norm": 0.22204319735300987,
      "learning_rate": 4.622108456977773e-05,
      "loss": 0.5036,
      "step": 192
    },
    {
      "epoch": 2.511779041429732,
      "grad_norm": 0.20367464116828346,
      "learning_rate": 4.585786058721687e-05,
      "loss": 0.5,
      "step": 193
    },
    {
      "epoch": 2.5247766043866777,
      "grad_norm": 0.25365421803002,
      "learning_rate": 4.549414231273633e-05,
      "loss": 0.5031,
      "step": 194
    },
    {
      "epoch": 2.537774167343623,
      "grad_norm": 0.21606979350876157,
      "learning_rate": 4.512996043723453e-05,
      "loss": 0.5021,
      "step": 195
    },
    {
      "epoch": 2.5507717303005686,
      "grad_norm": 0.19709293456409957,
      "learning_rate": 4.476534569072895e-05,
      "loss": 0.5019,
      "step": 196
    },
    {
      "epoch": 2.563769293257514,
      "grad_norm": 0.17783473355390217,
      "learning_rate": 4.440032883976318e-05,
      "loss": 0.4991,
      "step": 197
    },
    {
      "epoch": 2.57676685621446,
      "grad_norm": 0.24070649254308227,
      "learning_rate": 4.403494068481074e-05,
      "loss": 0.498,
      "step": 198
    },
    {
      "epoch": 2.5897644191714053,
      "grad_norm": 0.19211194243534133,
      "learning_rate": 4.3669212057676145e-05,
      "loss": 0.5044,
      "step": 199
    },
    {
      "epoch": 2.602761982128351,
      "grad_norm": 0.20459563241794576,
      "learning_rate": 4.33031738188933e-05,
      "loss": 0.4944,
      "step": 200
    },
    {
      "epoch": 2.6157595450852966,
      "grad_norm": 0.21561311779498776,
      "learning_rate": 4.293685685512142e-05,
      "loss": 0.4991,
      "step": 201
    },
    {
      "epoch": 2.628757108042242,
      "grad_norm": 0.24474595705388458,
      "learning_rate": 4.257029207653881e-05,
      "loss": 0.4982,
      "step": 202
    },
    {
      "epoch": 2.6417546709991875,
      "grad_norm": 0.20720663848399937,
      "learning_rate": 4.220351041423462e-05,
      "loss": 0.4971,
      "step": 203
    },
    {
      "epoch": 2.6547522339561334,
      "grad_norm": 0.21409555538090586,
      "learning_rate": 4.183654281759888e-05,
      "loss": 0.5074,
      "step": 204
    },
    {
      "epoch": 2.667749796913079,
      "grad_norm": 0.18475729049078887,
      "learning_rate": 4.1469420251710905e-05,
      "loss": 0.5012,
      "step": 205
    },
    {
      "epoch": 2.680747359870024,
      "grad_norm": 0.18985766250106262,
      "learning_rate": 4.110217369472649e-05,
      "loss": 0.5057,
      "step": 206
    },
    {
      "epoch": 2.69374492282697,
      "grad_norm": 0.219164525252158,
      "learning_rate": 4.07348341352639e-05,
      "loss": 0.4938,
      "step": 207
    },
    {
      "epoch": 2.7067424857839155,
      "grad_norm": 0.16436043278644338,
      "learning_rate": 4.0367432569789065e-05,
      "loss": 0.4941,
      "step": 208
    },
    {
      "epoch": 2.719740048740861,
      "grad_norm": 0.16431610353215864,
      "learning_rate": 4e-05,
      "loss": 0.4908,
      "step": 209
    },
    {
      "epoch": 2.732737611697807,
      "grad_norm": 0.18122165154966388,
      "learning_rate": 3.963256743021095e-05,
      "loss": 0.4948,
      "step": 210
    },
    {
      "epoch": 2.7457351746547523,
      "grad_norm": 0.1895612667059973,
      "learning_rate": 3.92651658647361e-05,
      "loss": 0.4944,
      "step": 211
    },
    {
      "epoch": 2.7587327376116977,
      "grad_norm": 0.1898133778685081,
      "learning_rate": 3.889782630527353e-05,
      "loss": 0.5017,
      "step": 212
    },
    {
      "epoch": 2.7717303005686436,
      "grad_norm": 0.21805199186913155,
      "learning_rate": 3.853057974828911e-05,
      "loss": 0.5034,
      "step": 213
    },
    {
      "epoch": 2.784727863525589,
      "grad_norm": 0.1831497580469028,
      "learning_rate": 3.816345718240113e-05,
      "loss": 0.4975,
      "step": 214
    },
    {
      "epoch": 2.7977254264825344,
      "grad_norm": 0.18270730105554409,
      "learning_rate": 3.779648958576538e-05,
      "loss": 0.4978,
      "step": 215
    },
    {
      "epoch": 2.81072298943948,
      "grad_norm": 0.1549961859919279,
      "learning_rate": 3.74297079234612e-05,
      "loss": 0.501,
      "step": 216
    },
    {
      "epoch": 2.8237205523964257,
      "grad_norm": 0.17737516044768692,
      "learning_rate": 3.706314314487859e-05,
      "loss": 0.5033,
      "step": 217
    },
    {
      "epoch": 2.836718115353371,
      "grad_norm": 0.15913489910963527,
      "learning_rate": 3.669682618110671e-05,
      "loss": 0.497,
      "step": 218
    },
    {
      "epoch": 2.849715678310317,
      "grad_norm": 0.16296392045874272,
      "learning_rate": 3.6330787942323855e-05,
      "loss": 0.4976,
      "step": 219
    },
    {
      "epoch": 2.8627132412672625,
      "grad_norm": 0.1730063706824666,
      "learning_rate": 3.5965059315189274e-05,
      "loss": 0.5008,
      "step": 220
    },
    {
      "epoch": 2.875710804224208,
      "grad_norm": 0.18397850865887175,
      "learning_rate": 3.559967116023683e-05,
      "loss": 0.5017,
      "step": 221
    },
    {
      "epoch": 2.8887083671811533,
      "grad_norm": 0.170486856095215,
      "learning_rate": 3.523465430927106e-05,
      "loss": 0.4989,
      "step": 222
    },
    {
      "epoch": 2.901705930138099,
      "grad_norm": 0.18271802695583045,
      "learning_rate": 3.4870039562765475e-05,
      "loss": 0.4982,
      "step": 223
    },
    {
      "epoch": 2.9147034930950446,
      "grad_norm": 0.17133512291595282,
      "learning_rate": 3.4505857687263675e-05,
      "loss": 0.4928,
      "step": 224
    },
    {
      "epoch": 2.9277010560519905,
      "grad_norm": 0.1620913848424712,
      "learning_rate": 3.414213941278314e-05,
      "loss": 0.5019,
      "step": 225
    },
    {
      "epoch": 2.940698619008936,
      "grad_norm": 0.17309647607036316,
      "learning_rate": 3.377891543022229e-05,
      "loss": 0.4969,
      "step": 226
    },
    {
      "epoch": 2.9536961819658814,
      "grad_norm": 0.1552693867051102,
      "learning_rate": 3.341621638877064e-05,
      "loss": 0.5033,
      "step": 227
    },
    {
      "epoch": 2.966693744922827,
      "grad_norm": 0.15716236217741233,
      "learning_rate": 3.305407289332279e-05,
      "loss": 0.4953,
      "step": 228
    },
    {
      "epoch": 2.9796913078797727,
      "grad_norm": 0.14720876859213405,
      "learning_rate": 3.269251550189573e-05,
      "loss": 0.4963,
      "step": 229
    },
    {
      "epoch": 2.992688870836718,
      "grad_norm": 0.1618888421317767,
      "learning_rate": 3.2331574723050474e-05,
      "loss": 0.5049,
      "step": 230
    },
    {
      "epoch": 3.007311129163282,
      "grad_norm": 0.20996635969377356,
      "learning_rate": 3.197128101331764e-05,
      "loss": 0.4795,
      "step": 231
    },
    {
      "epoch": 3.0203086921202273,
      "grad_norm": 0.19731076434312814,
      "learning_rate": 3.161166477462759e-05,
      "loss": 0.454,
      "step": 232
    },
    {
      "epoch": 3.033306255077173,
      "grad_norm": 0.23256302661715916,
      "learning_rate": 3.125275635174497e-05,
      "loss": 0.4578,
      "step": 233
    },
    {
      "epoch": 3.0463038180341186,
      "grad_norm": 0.27935225061925695,
      "learning_rate": 3.089458602970828e-05,
      "loss": 0.4508,
      "step": 234
    },
    {
      "epoch": 3.059301380991064,
      "grad_norm": 0.2121038609311496,
      "learning_rate": 3.0537184031274306e-05,
      "loss": 0.4538,
      "step": 235
    },
    {
      "epoch": 3.07229894394801,
      "grad_norm": 0.26051518619568637,
      "learning_rate": 3.0180580514368037e-05,
      "loss": 0.4607,
      "step": 236
    },
    {
      "epoch": 3.0852965069049554,
      "grad_norm": 0.2335997625474098,
      "learning_rate": 2.9824805569537747e-05,
      "loss": 0.4503,
      "step": 237
    },
    {
      "epoch": 3.098294069861901,
      "grad_norm": 0.2127303282473783,
      "learning_rate": 2.9469889217416045e-05,
      "loss": 0.4566,
      "step": 238
    },
    {
      "epoch": 3.1112916328188467,
      "grad_norm": 0.2592389767469695,
      "learning_rate": 2.9115861406186593e-05,
      "loss": 0.455,
      "step": 239
    },
    {
      "epoch": 3.124289195775792,
      "grad_norm": 0.21704890218608397,
      "learning_rate": 2.8762752009057232e-05,
      "loss": 0.4548,
      "step": 240
    },
    {
      "epoch": 3.1372867587327375,
      "grad_norm": 0.20376985172351347,
      "learning_rate": 2.841059082173902e-05,
      "loss": 0.4518,
      "step": 241
    },
    {
      "epoch": 3.150284321689683,
      "grad_norm": 0.2025335681556548,
      "learning_rate": 2.805940755993223e-05,
      "loss": 0.4551,
      "step": 242
    },
    {
      "epoch": 3.163281884646629,
      "grad_norm": 0.18017393057483888,
      "learning_rate": 2.770923185681878e-05,
      "loss": 0.4552,
      "step": 243
    },
    {
      "epoch": 3.1762794476035743,
      "grad_norm": 0.1713610793269365,
      "learning_rate": 2.7360093260561904e-05,
      "loss": 0.4554,
      "step": 244
    },
    {
      "epoch": 3.1892770105605197,
      "grad_norm": 0.21406546378373156,
      "learning_rate": 2.7012021231812666e-05,
      "loss": 0.4548,
      "step": 245
    },
    {
      "epoch": 3.2022745735174656,
      "grad_norm": 0.16116975404585604,
      "learning_rate": 2.6665045141224193e-05,
      "loss": 0.4524,
      "step": 246
    },
    {
      "epoch": 3.215272136474411,
      "grad_norm": 0.18334052571241438,
      "learning_rate": 2.6319194266973256e-05,
      "loss": 0.4571,
      "step": 247
    },
    {
      "epoch": 3.2282696994313564,
      "grad_norm": 0.16891681064170494,
      "learning_rate": 2.597449779228983e-05,
      "loss": 0.4486,
      "step": 248
    },
    {
      "epoch": 3.2412672623883023,
      "grad_norm": 0.17020370829259132,
      "learning_rate": 2.563098480299451e-05,
      "loss": 0.4564,
      "step": 249
    },
    {
      "epoch": 3.2542648253452477,
      "grad_norm": 0.15985371532053932,
      "learning_rate": 2.5288684285044283e-05,
      "loss": 0.4564,
      "step": 250
    },
    {
      "epoch": 3.267262388302193,
      "grad_norm": 0.1774569936971813,
      "learning_rate": 2.4947625122086585e-05,
      "loss": 0.4558,
      "step": 251
    },
    {
      "epoch": 3.280259951259139,
      "grad_norm": 0.16090847087848084,
      "learning_rate": 2.460783609302218e-05,
      "loss": 0.4591,
      "step": 252
    },
    {
      "epoch": 3.2932575142160845,
      "grad_norm": 0.1975160669033593,
      "learning_rate": 2.4269345869576676e-05,
      "loss": 0.4532,
      "step": 253
    },
    {
      "epoch": 3.30625507717303,
      "grad_norm": 0.1421754683033612,
      "learning_rate": 2.393218301388123e-05,
      "loss": 0.4524,
      "step": 254
    },
    {
      "epoch": 3.319252640129976,
      "grad_norm": 0.16343078650300716,
      "learning_rate": 2.35963759760624e-05,
      "loss": 0.4499,
      "step": 255
    },
    {
      "epoch": 3.332250203086921,
      "grad_norm": 0.12924948690527432,
      "learning_rate": 2.3261953091841553e-05,
      "loss": 0.4476,
      "step": 256
    },
    {
      "epoch": 3.3452477660438666,
      "grad_norm": 0.13865627330009284,
      "learning_rate": 2.2928942580143855e-05,
      "loss": 0.4586,
      "step": 257
    },
    {
      "epoch": 3.3582453290008125,
      "grad_norm": 0.1427053692811759,
      "learning_rate": 2.2597372540717083e-05,
      "loss": 0.4506,
      "step": 258
    },
    {
      "epoch": 3.371242891957758,
      "grad_norm": 0.13955260210235942,
      "learning_rate": 2.226727095176057e-05,
      "loss": 0.459,
      "step": 259
    },
    {
      "epoch": 3.3842404549147034,
      "grad_norm": 0.14070699605197182,
      "learning_rate": 2.1938665667564435e-05,
      "loss": 0.4516,
      "step": 260
    },
    {
      "epoch": 3.3972380178716493,
      "grad_norm": 0.13770636964584296,
      "learning_rate": 2.1611584416159106e-05,
      "loss": 0.4612,
      "step": 261
    },
    {
      "epoch": 3.4102355808285947,
      "grad_norm": 0.12849805538578324,
      "learning_rate": 2.1286054796975696e-05,
      "loss": 0.4553,
      "step": 262
    },
    {
      "epoch": 3.42323314378554,
      "grad_norm": 0.13177484817311683,
      "learning_rate": 2.096210427851706e-05,
      "loss": 0.4514,
      "step": 263
    },
    {
      "epoch": 3.436230706742486,
      "grad_norm": 0.13158205912989682,
      "learning_rate": 2.063976019604006e-05,
      "loss": 0.4505,
      "step": 264
    },
    {
      "epoch": 3.4492282696994314,
      "grad_norm": 0.12631589704885543,
      "learning_rate": 2.0319049749248876e-05,
      "loss": 0.4593,
      "step": 265
    },
    {
      "epoch": 3.462225832656377,
      "grad_norm": 0.12669568578055,
      "learning_rate": 2.0000000000000012e-05,
      "loss": 0.4489,
      "step": 266
    },
    {
      "epoch": 3.4752233956133223,
      "grad_norm": 0.13593170946905256,
      "learning_rate": 1.9682637870018638e-05,
      "loss": 0.4521,
      "step": 267
    },
    {
      "epoch": 3.488220958570268,
      "grad_norm": 0.1231076609355709,
      "learning_rate": 1.9366990138627054e-05,
      "loss": 0.4518,
      "step": 268
    },
    {
      "epoch": 3.5012185215272136,
      "grad_norm": 0.13931076961686364,
      "learning_rate": 1.9053083440484887e-05,
      "loss": 0.4483,
      "step": 269
    },
    {
      "epoch": 3.5142160844841595,
      "grad_norm": 0.12554367586858933,
      "learning_rate": 1.8740944263341773e-05,
      "loss": 0.455,
      "step": 270
    },
    {
      "epoch": 3.527213647441105,
      "grad_norm": 0.14612094230459444,
      "learning_rate": 1.8430598945802156e-05,
      "loss": 0.4592,
      "step": 271
    },
    {
      "epoch": 3.5402112103980503,
      "grad_norm": 0.1290431553328097,
      "learning_rate": 1.8122073675102935e-05,
      "loss": 0.4514,
      "step": 272
    },
    {
      "epoch": 3.5532087733549957,
      "grad_norm": 0.12686093908604665,
      "learning_rate": 1.781539448490365e-05,
      "loss": 0.4522,
      "step": 273
    },
    {
      "epoch": 3.5662063363119416,
      "grad_norm": 0.11863063600346366,
      "learning_rate": 1.7510587253089842e-05,
      "loss": 0.4576,
      "step": 274
    },
    {
      "epoch": 3.579203899268887,
      "grad_norm": 0.12151731546430022,
      "learning_rate": 1.7207677699589355e-05,
      "loss": 0.4575,
      "step": 275
    },
    {
      "epoch": 3.592201462225833,
      "grad_norm": 0.13465971496188542,
      "learning_rate": 1.690669138420215e-05,
      "loss": 0.4552,
      "step": 276
    },
    {
      "epoch": 3.6051990251827783,
      "grad_norm": 0.12151589453070985,
      "learning_rate": 1.6607653704443457e-05,
      "loss": 0.4564,
      "step": 277
    },
    {
      "epoch": 3.618196588139724,
      "grad_norm": 0.11553472109779886,
      "learning_rate": 1.6310589893400804e-05,
      "loss": 0.4513,
      "step": 278
    },
    {
      "epoch": 3.631194151096669,
      "grad_norm": 0.1299593052415094,
      "learning_rate": 1.601552501760473e-05,
      "loss": 0.4628,
      "step": 279
    },
    {
      "epoch": 3.644191714053615,
      "grad_norm": 0.11487631922937644,
      "learning_rate": 1.5722483974913737e-05,
      "loss": 0.4535,
      "step": 280
    },
    {
      "epoch": 3.6571892770105605,
      "grad_norm": 0.1155778088636727,
      "learning_rate": 1.5431491492413288e-05,
      "loss": 0.4525,
      "step": 281
    },
    {
      "epoch": 3.670186839967506,
      "grad_norm": 0.10917381391292548,
      "learning_rate": 1.5142572124329418e-05,
      "loss": 0.4545,
      "step": 282
    },
    {
      "epoch": 3.683184402924452,
      "grad_norm": 0.112815007140388,
      "learning_rate": 1.4855750249956718e-05,
      "loss": 0.455,
      "step": 283
    },
    {
      "epoch": 3.6961819658813972,
      "grad_norm": 0.10952932306806101,
      "learning_rate": 1.457105007160129e-05,
      "loss": 0.4603,
      "step": 284
    },
    {
      "epoch": 3.7091795288383427,
      "grad_norm": 0.11933650657176735,
      "learning_rate": 1.4288495612538427e-05,
      "loss": 0.4561,
      "step": 285
    },
    {
      "epoch": 3.722177091795288,
      "grad_norm": 0.11966615448159834,
      "learning_rate": 1.4008110714985623e-05,
      "loss": 0.458,
      "step": 286
    },
    {
      "epoch": 3.735174654752234,
      "grad_norm": 0.11500621723286121,
      "learning_rate": 1.3729919038090627e-05,
      "loss": 0.4544,
      "step": 287
    },
    {
      "epoch": 3.7481722177091794,
      "grad_norm": 0.1261016043557776,
      "learning_rate": 1.3453944055935151e-05,
      "loss": 0.4567,
      "step": 288
    },
    {
      "epoch": 3.7611697806661253,
      "grad_norm": 0.11328895828363872,
      "learning_rate": 1.3180209055554043e-05,
      "loss": 0.4575,
      "step": 289
    },
    {
      "epoch": 3.7741673436230707,
      "grad_norm": 0.11555707128389255,
      "learning_rate": 1.2908737134970367e-05,
      "loss": 0.4582,
      "step": 290
    },
    {
      "epoch": 3.787164906580016,
      "grad_norm": 0.11426549760151167,
      "learning_rate": 1.2639551201246278e-05,
      "loss": 0.4532,
      "step": 291
    },
    {
      "epoch": 3.8001624695369616,
      "grad_norm": 0.11450671603412013,
      "learning_rate": 1.2372673968550229e-05,
      "loss": 0.4492,
      "step": 292
    },
    {
      "epoch": 3.8131600324939074,
      "grad_norm": 0.10461893290927064,
      "learning_rate": 1.2108127956240186e-05,
      "loss": 0.4484,
      "step": 293
    },
    {
      "epoch": 3.826157595450853,
      "grad_norm": 0.11592861392639016,
      "learning_rate": 1.1845935486963546e-05,
      "loss": 0.4533,
      "step": 294
    },
    {
      "epoch": 3.8391551584077988,
      "grad_norm": 0.10916274841615102,
      "learning_rate": 1.158611868477344e-05,
      "loss": 0.4499,
      "step": 295
    },
    {
      "epoch": 3.852152721364744,
      "grad_norm": 0.11085565295793819,
      "learning_rate": 1.1328699473261957e-05,
      "loss": 0.4475,
      "step": 296
    },
    {
      "epoch": 3.8651502843216896,
      "grad_norm": 0.11270127433420311,
      "learning_rate": 1.107369957371013e-05,
      "loss": 0.46,
      "step": 297
    },
    {
      "epoch": 3.878147847278635,
      "grad_norm": 0.10798343390475132,
      "learning_rate": 1.0821140503255174e-05,
      "loss": 0.4493,
      "step": 298
    },
    {
      "epoch": 3.891145410235581,
      "grad_norm": 0.10573884600589767,
      "learning_rate": 1.0571043573074737e-05,
      "loss": 0.4486,
      "step": 299
    },
    {
      "epoch": 3.9041429731925263,
      "grad_norm": 0.10696680010317415,
      "learning_rate": 1.0323429886588743e-05,
      "loss": 0.4571,
      "step": 300
    },
    {
      "epoch": 3.917140536149472,
      "grad_norm": 0.10555914364807507,
      "learning_rate": 1.0078320337678584e-05,
      "loss": 0.4487,
      "step": 301
    },
    {
      "epoch": 3.9301380991064176,
      "grad_norm": 0.11174895825890044,
      "learning_rate": 9.835735608924155e-06,
      "loss": 0.4548,
      "step": 302
    },
    {
      "epoch": 3.943135662063363,
      "grad_norm": 0.11266935997454287,
      "learning_rate": 9.595696169858542e-06,
      "loss": 0.4535,
      "step": 303
    },
    {
      "epoch": 3.9561332250203085,
      "grad_norm": 0.09920882966315055,
      "learning_rate": 9.358222275240884e-06,
      "loss": 0.4556,
      "step": 304
    },
    {
      "epoch": 3.9691307879772544,
      "grad_norm": 0.1126209630439109,
      "learning_rate": 9.123333963347166e-06,
      "loss": 0.4579,
      "step": 305
    },
    {
      "epoch": 3.9821283509342,
      "grad_norm": 0.10755570639650616,
      "learning_rate": 8.89105105427945e-06,
      "loss": 0.4516,
      "step": 306
    },
    {
      "epoch": 3.9951259138911457,
      "grad_norm": 0.10369220468763879,
      "learning_rate": 8.661393148293355e-06,
      "loss": 0.453,
      "step": 307
    },
    {
      "epoch": 4.0097481722177095,
      "grad_norm": 0.16316827455729266,
      "learning_rate": 8.434379624144261e-06,
      "loss": 0.4399,
      "step": 308
    },
    {
      "epoch": 4.022745735174655,
      "grad_norm": 0.1627430975816477,
      "learning_rate": 8.210029637452016e-06,
      "loss": 0.4236,
      "step": 309
    },
    {
      "epoch": 4.0357432981316,
      "grad_norm": 0.11843109260911805,
      "learning_rate": 7.988362119084642e-06,
      "loss": 0.4246,
      "step": 310
    },
    {
      "epoch": 4.048740861088546,
      "grad_norm": 0.12489995078048981,
      "learning_rate": 7.769395773560874e-06,
      "loss": 0.4281,
      "step": 311
    },
    {
      "epoch": 4.061738424045491,
      "grad_norm": 0.14961111092042415,
      "learning_rate": 7.553149077471915e-06,
      "loss": 0.4314,
      "step": 312
    },
    {
      "epoch": 4.074735987002437,
      "grad_norm": 0.16631212545248225,
      "learning_rate": 7.3396402779222845e-06,
      "loss": 0.4285,
      "step": 313
    },
    {
      "epoch": 4.087733549959383,
      "grad_norm": 0.14353822134082916,
      "learning_rate": 7.128887390990198e-06,
      "loss": 0.4218,
      "step": 314
    },
    {
      "epoch": 4.100731112916328,
      "grad_norm": 0.1378750355216543,
      "learning_rate": 6.9209082002072725e-06,
      "loss": 0.4251,
      "step": 315
    },
    {
      "epoch": 4.113728675873274,
      "grad_norm": 0.12807424487959607,
      "learning_rate": 6.715720255058e-06,
      "loss": 0.434,
      "step": 316
    },
    {
      "epoch": 4.126726238830219,
      "grad_norm": 0.13015531066132296,
      "learning_rate": 6.513340869498859e-06,
      "loss": 0.4246,
      "step": 317
    },
    {
      "epoch": 4.139723801787165,
      "grad_norm": 0.14514193079110016,
      "learning_rate": 6.313787120497376e-06,
      "loss": 0.4288,
      "step": 318
    },
    {
      "epoch": 4.15272136474411,
      "grad_norm": 0.1366568470719478,
      "learning_rate": 6.117075846591123e-06,
      "loss": 0.4236,
      "step": 319
    },
    {
      "epoch": 4.1657189277010565,
      "grad_norm": 0.11930045160597687,
      "learning_rate": 5.923223646466923e-06,
      "loss": 0.4258,
      "step": 320
    },
    {
      "epoch": 4.178716490658002,
      "grad_norm": 0.11338899398667109,
      "learning_rate": 5.732246877560146e-06,
      "loss": 0.4257,
      "step": 321
    },
    {
      "epoch": 4.191714053614947,
      "grad_norm": 0.12656852484885386,
      "learning_rate": 5.5441616546745646e-06,
      "loss": 0.4263,
      "step": 322
    },
    {
      "epoch": 4.204711616571893,
      "grad_norm": 0.1203349846807835,
      "learning_rate": 5.358983848622452e-06,
      "loss": 0.4243,
      "step": 323
    },
    {
      "epoch": 4.217709179528838,
      "grad_norm": 0.11202043270158919,
      "learning_rate": 5.176729084885508e-06,
      "loss": 0.4278,
      "step": 324
    },
    {
      "epoch": 4.230706742485784,
      "grad_norm": 0.10971697060308855,
      "learning_rate": 4.99741274229625e-06,
      "loss": 0.4253,
      "step": 325
    },
    {
      "epoch": 4.24370430544273,
      "grad_norm": 0.11451882024388883,
      "learning_rate": 4.821049951740442e-06,
      "loss": 0.4255,
      "step": 326
    },
    {
      "epoch": 4.256701868399675,
      "grad_norm": 0.11501774277849335,
      "learning_rate": 4.647655594880225e-06,
      "loss": 0.4273,
      "step": 327
    },
    {
      "epoch": 4.269699431356621,
      "grad_norm": 0.10856387260845814,
      "learning_rate": 4.4772443028985004e-06,
      "loss": 0.4327,
      "step": 328
    },
    {
      "epoch": 4.282696994313566,
      "grad_norm": 0.10662919577972961,
      "learning_rate": 4.3098304552642385e-06,
      "loss": 0.4295,
      "step": 329
    },
    {
      "epoch": 4.295694557270512,
      "grad_norm": 0.10569714582518992,
      "learning_rate": 4.1454281785191995e-06,
      "loss": 0.4235,
      "step": 330
    },
    {
      "epoch": 4.308692120227457,
      "grad_norm": 0.10668551395374147,
      "learning_rate": 3.984051345085855e-06,
      "loss": 0.4259,
      "step": 331
    },
    {
      "epoch": 4.3216896831844025,
      "grad_norm": 0.11247906580591396,
      "learning_rate": 3.825713572096903e-06,
      "loss": 0.4205,
      "step": 332
    },
    {
      "epoch": 4.334687246141349,
      "grad_norm": 0.09984629980789353,
      "learning_rate": 3.6704282202461515e-06,
      "loss": 0.4263,
      "step": 333
    },
    {
      "epoch": 4.347684809098294,
      "grad_norm": 0.09978390449414096,
      "learning_rate": 3.518208392661184e-06,
      "loss": 0.424,
      "step": 334
    },
    {
      "epoch": 4.36068237205524,
      "grad_norm": 0.10677309740013617,
      "learning_rate": 3.3690669337977e-06,
      "loss": 0.422,
      "step": 335
    },
    {
      "epoch": 4.373679935012185,
      "grad_norm": 0.09996882970996637,
      "learning_rate": 3.2230164283556918e-06,
      "loss": 0.4236,
      "step": 336
    },
    {
      "epoch": 4.3866774979691305,
      "grad_norm": 0.09578924808601377,
      "learning_rate": 3.080069200217497e-06,
      "loss": 0.4257,
      "step": 337
    },
    {
      "epoch": 4.399675060926076,
      "grad_norm": 0.09396778955504816,
      "learning_rate": 2.9402373114079295e-06,
      "loss": 0.4253,
      "step": 338
    },
    {
      "epoch": 4.412672623883022,
      "grad_norm": 0.0947968037466629,
      "learning_rate": 2.803532561076492e-06,
      "loss": 0.431,
      "step": 339
    },
    {
      "epoch": 4.425670186839968,
      "grad_norm": 0.09946131836401616,
      "learning_rate": 2.669966484501716e-06,
      "loss": 0.4306,
      "step": 340
    },
    {
      "epoch": 4.438667749796913,
      "grad_norm": 0.09626077532608296,
      "learning_rate": 2.5395503521178143e-06,
      "loss": 0.4266,
| "step": 341 | |
| }, | |
| { | |
| "epoch": 4.451665312753859, | |
| "grad_norm": 0.13517213941202075, | |
| "learning_rate": 2.4122951685636674e-06, | |
| "loss": 0.4303, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 4.464662875710804, | |
| "grad_norm": 0.0920475491242233, | |
| "learning_rate": 2.2882116717542634e-06, | |
| "loss": 0.431, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 4.477660438667749, | |
| "grad_norm": 0.09105224683628121, | |
| "learning_rate": 2.1673103319746146e-06, | |
| "loss": 0.4238, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 4.490658001624696, | |
| "grad_norm": 0.09216193209397198, | |
| "learning_rate": 2.049601350996233e-06, | |
| "loss": 0.423, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 4.503655564581641, | |
| "grad_norm": 0.0943905757756607, | |
| "learning_rate": 1.93509466121633e-06, | |
| "loss": 0.4243, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 4.516653127538587, | |
| "grad_norm": 0.09735611823252567, | |
| "learning_rate": 1.8237999248197002e-06, | |
| "loss": 0.4277, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 4.529650690495532, | |
| "grad_norm": 0.09645430457572284, | |
| "learning_rate": 1.7157265329634354e-06, | |
| "loss": 0.4236, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 4.5426482534524775, | |
| "grad_norm": 0.0920756263404523, | |
| "learning_rate": 1.6108836049844434e-06, | |
| "loss": 0.4288, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 4.555645816409423, | |
| "grad_norm": 0.09448443193483406, | |
| "learning_rate": 1.5092799876299835e-06, | |
| "loss": 0.4333, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 4.568643379366369, | |
| "grad_norm": 0.09168940218519567, | |
| "learning_rate": 1.4109242543111834e-06, | |
| "loss": 0.428, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 4.581640942323315, | |
| "grad_norm": 0.09340613292838272, | |
| "learning_rate": 1.3158247043795735e-06, | |
| "loss": 0.4203, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 4.59463850528026, | |
| "grad_norm": 0.08973994262732386, | |
| "learning_rate": 1.2239893624267852e-06, | |
| "loss": 0.4263, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 4.6076360682372055, | |
| "grad_norm": 0.09429472748256586, | |
| "learning_rate": 1.1354259776074472e-06, | |
| "loss": 0.4229, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 4.620633631194151, | |
| "grad_norm": 0.09055346761294361, | |
| "learning_rate": 1.050142022985292e-06, | |
| "loss": 0.4253, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 4.633631194151096, | |
| "grad_norm": 0.09062612322699877, | |
| "learning_rate": 9.681446949025752e-07, | |
| "loss": 0.4245, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 4.646628757108042, | |
| "grad_norm": 0.09042042924091588, | |
| "learning_rate": 8.89440912372832e-07, | |
| "loss": 0.4179, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 4.659626320064988, | |
| "grad_norm": 0.09194481794235952, | |
| "learning_rate": 8.140373164970428e-07, | |
| "loss": 0.4301, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 4.6726238830219335, | |
| "grad_norm": 0.09366397128826567, | |
| "learning_rate": 7.419402699032852e-07, | |
| "loss": 0.4227, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 4.685621445978879, | |
| "grad_norm": 0.08821057658897659, | |
| "learning_rate": 6.731558562097995e-07, | |
| "loss": 0.4264, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 4.698619008935824, | |
| "grad_norm": 0.09083442802058574, | |
| "learning_rate": 6.076898795116792e-07, | |
| "loss": 0.4265, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 4.71161657189277, | |
| "grad_norm": 0.09003063615517265, | |
| "learning_rate": 5.455478638911071e-07, | |
| "loss": 0.4272, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 4.724614134849716, | |
| "grad_norm": 0.08945212233339732, | |
| "learning_rate": 4.867350529512261e-07, | |
| "loss": 0.4204, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 4.737611697806662, | |
| "grad_norm": 0.08734995272765789, | |
| "learning_rate": 4.3125640937368373e-07, | |
| "loss": 0.4274, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 4.750609260763607, | |
| "grad_norm": 0.09082893124848684, | |
| "learning_rate": 3.791166144998704e-07, | |
| "loss": 0.4248, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 4.763606823720552, | |
| "grad_norm": 0.08806282209071367, | |
| "learning_rate": 3.3032006793590977e-07, | |
| "loss": 0.4243, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 4.776604386677498, | |
| "grad_norm": 0.09048478332231094, | |
| "learning_rate": 2.848708871814054e-07, | |
| "loss": 0.4267, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 4.789601949634443, | |
| "grad_norm": 0.08924885575111106, | |
| "learning_rate": 2.4277290728202063e-07, | |
| "loss": 0.4217, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 4.802599512591389, | |
| "grad_norm": 0.0892092425894652, | |
| "learning_rate": 2.040296805058528e-07, | |
| "loss": 0.4332, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 4.815597075548335, | |
| "grad_norm": 0.09257254403975926, | |
| "learning_rate": 1.6864447604370004e-07, | |
| "loss": 0.4316, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 4.8285946385052805, | |
| "grad_norm": 0.08603627784307787, | |
| "learning_rate": 1.3662027973320614e-07, | |
| "loss": 0.4325, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 4.841592201462226, | |
| "grad_norm": 0.0896720009488906, | |
| "learning_rate": 1.0795979380690657e-07, | |
| "loss": 0.4198, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 4.854589764419171, | |
| "grad_norm": 0.08869591858273997, | |
| "learning_rate": 8.266543666421544e-08, | |
| "loss": 0.4187, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 4.867587327376117, | |
| "grad_norm": 0.08761681066208689, | |
| "learning_rate": 6.073934266735303e-08, | |
| "loss": 0.4251, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 4.880584890333062, | |
| "grad_norm": 0.08864693759754272, | |
| "learning_rate": 4.218336196125439e-08, | |
| "loss": 0.4287, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 4.8935824532900085, | |
| "grad_norm": 0.08657763165853864, | |
| "learning_rate": 2.699906031745414e-08, | |
| "loss": 0.4247, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 4.906580016246954, | |
| "grad_norm": 0.08858764828344305, | |
| "learning_rate": 1.5187719001943378e-08, | |
| "loss": 0.4235, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 4.919577579203899, | |
| "grad_norm": 0.08758061054007635, | |
| "learning_rate": 6.750334667091629e-09, | |
| "loss": 0.4252, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 4.932575142160845, | |
| "grad_norm": 0.09004668669428884, | |
| "learning_rate": 1.6876192675052695e-09, | |
| "loss": 0.429, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 4.94557270511779, | |
| "grad_norm": 0.08613429520636201, | |
| "learning_rate": 0.0, | |
| "loss": 0.4249, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 4.94557270511779, | |
| "step": 380, | |
| "total_flos": 9.778530867524665e+18, | |
| "train_loss": 0.5330901978047271, | |
| "train_runtime": 39276.6384, | |
| "train_samples_per_second": 5.012, | |
| "train_steps_per_second": 0.01 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 380, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 9.778530867524665e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
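
The state above matches the `trainer_state.json` layout that the Hugging Face `transformers` Trainer writes alongside its checkpoints. As a minimal sketch of how this log could be consumed, the snippet below (the filename and path are assumptions; point it at wherever the state file actually lives) pulls the per-step `loss` values out of `log_history` and recomputes their mean, which should land close to the `train_loss` summary field. Filtering on the `"loss"` key works because the final summary entry reports `"train_loss"` instead and so is skipped automatically.

```python
import json

# Minimal sketch: summarize the per-step training log in a Trainer state file.
# Assumes the JSON above is saved as trainer_state.json (path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Every per-step entry in log_history carries a "loss"; the trailing run
# summary carries "train_loss" instead, so it is excluded by this filter.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

print(f"steps logged: {len(steps)}")
print(f"first loss:   {losses[0]:.4f}")
print(f"final loss:   {losses[-1]:.4f}")
print(f"mean loss:    {sum(losses) / len(losses):.4f}")  # ~ the train_loss field
```

Since `logging_steps` is 1 here, the `steps`/`losses` pairs cover every optimizer step from 1 to `max_steps`, so they can be fed directly into any plotting tool to visualize the loss curve.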