{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.94557270511779,
  "eval_steps": 500,
  "global_step": 380,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012997562956945572,
      "grad_norm": 6.099783948625711,
      "learning_rate": 2.105263157894737e-06,
      "loss": 1.0219,
      "step": 1
    },
    {
      "epoch": 0.025995125913891144,
      "grad_norm": 6.068915599090855,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.0148,
      "step": 2
    },
    {
      "epoch": 0.03899268887083672,
      "grad_norm": 5.594712813142529,
      "learning_rate": 6.31578947368421e-06,
      "loss": 0.9975,
      "step": 3
    },
    {
      "epoch": 0.05199025182778229,
      "grad_norm": 4.038714211131839,
      "learning_rate": 8.421052631578948e-06,
      "loss": 0.9696,
      "step": 4
    },
    {
      "epoch": 0.06498781478472786,
      "grad_norm": 2.365897221739178,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.9322,
      "step": 5
    },
    {
      "epoch": 0.07798537774167344,
      "grad_norm": 4.808874284957715,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.9538,
      "step": 6
    },
    {
      "epoch": 0.09098294069861901,
      "grad_norm": 4.840119194557309,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 0.9456,
      "step": 7
    },
    {
      "epoch": 0.10398050365556458,
      "grad_norm": 5.7169851588504645,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.9349,
      "step": 8
    },
    {
      "epoch": 0.11697806661251016,
      "grad_norm": 4.120707061579036,
      "learning_rate": 1.894736842105263e-05,
      "loss": 0.9107,
      "step": 9
    },
    {
      "epoch": 0.12997562956945571,
      "grad_norm": 2.7497360309830476,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.8669,
      "step": 10
    },
    {
      "epoch": 0.1429731925264013,
      "grad_norm": 1.8093148555009293,
      "learning_rate": 2.3157894736842107e-05,
      "loss": 0.8289,
      "step": 11
    },
    {
      "epoch": 0.15597075548334688,
      "grad_norm": 1.617836052019221,
      "learning_rate": 2.526315789473684e-05,
      "loss": 0.8096,
      "step": 12
    },
    {
      "epoch": 0.16896831844029245,
      "grad_norm": 1.0744851220126015,
      "learning_rate": 2.7368421052631583e-05,
      "loss": 0.7877,
      "step": 13
    },
    {
      "epoch": 0.18196588139723802,
      "grad_norm": 1.2705200346488472,
      "learning_rate": 2.9473684210526317e-05,
      "loss": 0.78,
      "step": 14
    },
    {
      "epoch": 0.19496344435418358,
      "grad_norm": 1.4130680513032683,
      "learning_rate": 3.157894736842106e-05,
      "loss": 0.7769,
      "step": 15
    },
    {
      "epoch": 0.20796100731112915,
      "grad_norm": 1.324813747526517,
      "learning_rate": 3.368421052631579e-05,
      "loss": 0.7705,
      "step": 16
    },
    {
      "epoch": 0.22095857026807472,
      "grad_norm": 1.2590819742707091,
      "learning_rate": 3.578947368421053e-05,
      "loss": 0.7458,
      "step": 17
    },
    {
      "epoch": 0.23395613322502032,
      "grad_norm": 1.2135489907048478,
      "learning_rate": 3.789473684210526e-05,
      "loss": 0.7415,
      "step": 18
    },
    {
      "epoch": 0.2469536961819659,
      "grad_norm": 0.955476297443492,
      "learning_rate": 4e-05,
      "loss": 0.7311,
      "step": 19
    },
    {
      "epoch": 0.25995125913891143,
      "grad_norm": 1.6971617462330089,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.7413,
      "step": 20
    },
    {
      "epoch": 0.272948822095857,
      "grad_norm": 1.4395879957659508,
      "learning_rate": 4.421052631578948e-05,
      "loss": 0.7295,
      "step": 21
    },
    {
      "epoch": 0.2859463850528026,
      "grad_norm": 1.171007731697555,
      "learning_rate": 4.6315789473684214e-05,
      "loss": 0.7153,
      "step": 22
    },
    {
      "epoch": 0.29894394800974816,
      "grad_norm": 1.931104084566943,
      "learning_rate": 4.842105263157895e-05,
      "loss": 0.7249,
      "step": 23
    },
    {
      "epoch": 0.31194151096669376,
      "grad_norm": 1.1062671298809976,
      "learning_rate": 5.052631578947368e-05,
      "loss": 0.7178,
      "step": 24
    },
    {
      "epoch": 0.3249390739236393,
      "grad_norm": 2.0237024200236986,
      "learning_rate": 5.263157894736843e-05,
      "loss": 0.7214,
      "step": 25
    },
    {
      "epoch": 0.3379366368805849,
      "grad_norm": 1.6595941751589156,
      "learning_rate": 5.4736842105263165e-05,
      "loss": 0.702,
      "step": 26
    },
    {
      "epoch": 0.35093419983753044,
      "grad_norm": 1.643241943721994,
      "learning_rate": 5.68421052631579e-05,
      "loss": 0.7054,
      "step": 27
    },
    {
      "epoch": 0.36393176279447603,
      "grad_norm": 1.9552851038948404,
      "learning_rate": 5.8947368421052634e-05,
      "loss": 0.7013,
      "step": 28
    },
    {
      "epoch": 0.37692932575142163,
      "grad_norm": 1.1207858673487592,
      "learning_rate": 6.105263157894738e-05,
      "loss": 0.6901,
      "step": 29
    },
    {
      "epoch": 0.38992688870836717,
      "grad_norm": 2.1963019920042757,
      "learning_rate": 6.315789473684212e-05,
      "loss": 0.6868,
      "step": 30
    },
    {
      "epoch": 0.40292445166531277,
      "grad_norm": 1.4781504300412824,
      "learning_rate": 6.526315789473685e-05,
      "loss": 0.6903,
      "step": 31
    },
    {
      "epoch": 0.4159220146222583,
      "grad_norm": 2.122635652457263,
      "learning_rate": 6.736842105263159e-05,
      "loss": 0.6858,
      "step": 32
    },
    {
      "epoch": 0.4289195775792039,
      "grad_norm": 1.650492771547578,
      "learning_rate": 6.947368421052632e-05,
      "loss": 0.6898,
      "step": 33
    },
    {
      "epoch": 0.44191714053614944,
      "grad_norm": 1.5236371764651064,
      "learning_rate": 7.157894736842105e-05,
      "loss": 0.6813,
      "step": 34
    },
    {
      "epoch": 0.45491470349309504,
      "grad_norm": 1.185803755084194,
      "learning_rate": 7.368421052631579e-05,
      "loss": 0.6829,
      "step": 35
    },
    {
      "epoch": 0.46791226645004064,
      "grad_norm": 1.9405604072257883,
      "learning_rate": 7.578947368421052e-05,
      "loss": 0.6738,
      "step": 36
    },
    {
      "epoch": 0.4809098294069862,
      "grad_norm": 1.363411287783038,
      "learning_rate": 7.789473684210527e-05,
      "loss": 0.6696,
      "step": 37
    },
    {
      "epoch": 0.4939073923639318,
      "grad_norm": 2.0874162228533115,
      "learning_rate": 8e-05,
      "loss": 0.6753,
      "step": 38
    },
    {
      "epoch": 0.5069049553208773,
      "grad_norm": 1.8491550544598394,
      "learning_rate": 7.99983123807325e-05,
      "loss": 0.6723,
      "step": 39
    },
    {
      "epoch": 0.5199025182778229,
      "grad_norm": 1.2473648539637896,
      "learning_rate": 7.999324966533291e-05,
      "loss": 0.66,
      "step": 40
    },
    {
      "epoch": 0.5329000812347685,
      "grad_norm": 3.3198725973160084,
      "learning_rate": 7.998481228099806e-05,
      "loss": 0.675,
      "step": 41
    },
    {
      "epoch": 0.545897644191714,
      "grad_norm": 2.604690756502679,
      "learning_rate": 7.997300093968255e-05,
      "loss": 0.6824,
      "step": 42
    },
    {
      "epoch": 0.5588952071486596,
      "grad_norm": 2.02050667371959,
      "learning_rate": 7.995781663803876e-05,
      "loss": 0.6667,
      "step": 43
    },
    {
      "epoch": 0.5718927701056052,
      "grad_norm": 1.1560247057038098,
      "learning_rate": 7.993926065733265e-05,
      "loss": 0.6619,
      "step": 44
    },
    {
      "epoch": 0.5848903330625508,
      "grad_norm": 1.5367530120814719,
      "learning_rate": 7.991733456333579e-05,
      "loss": 0.6664,
      "step": 45
    },
    {
      "epoch": 0.5978878960194963,
      "grad_norm": 1.1634220605553418,
      "learning_rate": 7.98920402061931e-05,
      "loss": 0.6493,
      "step": 46
    },
    {
      "epoch": 0.6108854589764419,
      "grad_norm": 1.015353240524831,
      "learning_rate": 7.98633797202668e-05,
      "loss": 0.659,
      "step": 47
    },
    {
      "epoch": 0.6238830219333875,
      "grad_norm": 1.4952317667466968,
      "learning_rate": 7.98313555239563e-05,
      "loss": 0.6609,
      "step": 48
    },
    {
      "epoch": 0.6368805848903331,
      "grad_norm": 0.8908692378681831,
      "learning_rate": 7.979597031949415e-05,
      "loss": 0.6638,
      "step": 49
    },
    {
      "epoch": 0.6498781478472786,
      "grad_norm": 1.7121745994414552,
      "learning_rate": 7.975722709271799e-05,
      "loss": 0.6636,
      "step": 50
    },
    {
      "epoch": 0.6628757108042242,
      "grad_norm": 1.2827332360089716,
      "learning_rate": 7.97151291128186e-05,
      "loss": 0.6562,
      "step": 51
    },
    {
      "epoch": 0.6758732737611698,
      "grad_norm": 1.1190183435245546,
      "learning_rate": 7.96696799320641e-05,
      "loss": 0.6467,
      "step": 52
    },
    {
      "epoch": 0.6888708367181153,
      "grad_norm": 1.5796475425952419,
      "learning_rate": 7.962088338550013e-05,
      "loss": 0.6549,
      "step": 53
    },
    {
      "epoch": 0.7018683996750609,
      "grad_norm": 1.0356021868877745,
      "learning_rate": 7.956874359062632e-05,
      "loss": 0.6486,
      "step": 54
    },
    {
      "epoch": 0.7148659626320065,
      "grad_norm": 1.689157245211179,
      "learning_rate": 7.951326494704878e-05,
      "loss": 0.6427,
      "step": 55
    },
    {
      "epoch": 0.7278635255889521,
      "grad_norm": 1.3036787434531885,
      "learning_rate": 7.94544521361089e-05,
      "loss": 0.6451,
      "step": 56
    },
    {
      "epoch": 0.7408610885458976,
      "grad_norm": 1.411604156835551,
      "learning_rate": 7.939231012048833e-05,
      "loss": 0.6434,
      "step": 57
    },
    {
      "epoch": 0.7538586515028433,
      "grad_norm": 1.073329384486516,
      "learning_rate": 7.932684414379021e-05,
      "loss": 0.6417,
      "step": 58
    },
    {
      "epoch": 0.7668562144597888,
      "grad_norm": 1.1364966234135832,
      "learning_rate": 7.925805973009672e-05,
      "loss": 0.6348,
      "step": 59
    },
    {
      "epoch": 0.7798537774167343,
      "grad_norm": 0.9594628748904098,
      "learning_rate": 7.918596268350296e-05,
      "loss": 0.6399,
      "step": 60
    },
    {
      "epoch": 0.7928513403736799,
      "grad_norm": 1.3124733925076593,
      "learning_rate": 7.911055908762718e-05,
      "loss": 0.6445,
      "step": 61
    },
    {
      "epoch": 0.8058489033306255,
      "grad_norm": 0.8150161261530253,
      "learning_rate": 7.903185530509743e-05,
      "loss": 0.6333,
      "step": 62
    },
    {
      "epoch": 0.8188464662875711,
      "grad_norm": 0.8675788545202524,
      "learning_rate": 7.894985797701472e-05,
      "loss": 0.6342,
      "step": 63
    },
    {
      "epoch": 0.8318440292445166,
      "grad_norm": 0.8701303058033523,
      "learning_rate": 7.886457402239256e-05,
      "loss": 0.627,
      "step": 64
    },
    {
      "epoch": 0.8448415922014623,
      "grad_norm": 1.1211379390083465,
      "learning_rate": 7.877601063757323e-05,
      "loss": 0.6346,
      "step": 65
    },
    {
      "epoch": 0.8578391551584078,
      "grad_norm": 1.0237659842661562,
      "learning_rate": 7.868417529562043e-05,
      "loss": 0.6244,
      "step": 66
    },
    {
      "epoch": 0.8708367181153533,
      "grad_norm": 0.8724494077448317,
      "learning_rate": 7.858907574568882e-05,
      "loss": 0.6279,
      "step": 67
    },
    {
      "epoch": 0.8838342810722989,
      "grad_norm": 0.8890596583749399,
      "learning_rate": 7.849072001237001e-05,
      "loss": 0.6212,
      "step": 68
    },
    {
      "epoch": 0.8968318440292445,
      "grad_norm": 0.9491308234698868,
      "learning_rate": 7.838911639501557e-05,
      "loss": 0.6164,
      "step": 69
    },
    {
      "epoch": 0.9098294069861901,
      "grad_norm": 0.9144632388033332,
      "learning_rate": 7.828427346703657e-05,
      "loss": 0.6141,
      "step": 70
    },
    {
      "epoch": 0.9228269699431356,
      "grad_norm": 1.0369164699296847,
      "learning_rate": 7.81762000751803e-05,
      "loss": 0.6227,
      "step": 71
    },
    {
      "epoch": 0.9358245329000813,
      "grad_norm": 1.0824992143715033,
      "learning_rate": 7.806490533878368e-05,
      "loss": 0.6219,
      "step": 72
    },
    {
      "epoch": 0.9488220958570268,
      "grad_norm": 0.8841365960283825,
      "learning_rate": 7.795039864900378e-05,
      "loss": 0.6134,
      "step": 73
    },
    {
      "epoch": 0.9618196588139724,
      "grad_norm": 0.6911046945305666,
      "learning_rate": 7.783268966802539e-05,
      "loss": 0.6263,
      "step": 74
    },
    {
      "epoch": 0.974817221770918,
      "grad_norm": 0.7831186784164668,
      "learning_rate": 7.771178832824573e-05,
      "loss": 0.6133,
      "step": 75
    },
    {
      "epoch": 0.9878147847278635,
      "grad_norm": 0.6782592588604553,
      "learning_rate": 7.758770483143634e-05,
      "loss": 0.6102,
      "step": 76
    },
    {
      "epoch": 1.0024370430544274,
      "grad_norm": 0.6336474343134532,
      "learning_rate": 7.74604496478822e-05,
      "loss": 0.6116,
      "step": 77
    },
    {
      "epoch": 1.0154346060113728,
      "grad_norm": 0.6278277714102893,
      "learning_rate": 7.733003351549829e-05,
      "loss": 0.5828,
      "step": 78
    },
    {
      "epoch": 1.0284321689683185,
      "grad_norm": 0.6209449077089193,
      "learning_rate": 7.719646743892352e-05,
      "loss": 0.584,
      "step": 79
    },
    {
      "epoch": 1.0414297319252641,
      "grad_norm": 0.73979662934484,
      "learning_rate": 7.705976268859207e-05,
      "loss": 0.5892,
      "step": 80
    },
    {
      "epoch": 1.0544272948822095,
      "grad_norm": 0.8782533057922014,
      "learning_rate": 7.691993079978252e-05,
      "loss": 0.5809,
      "step": 81
    },
    {
      "epoch": 1.0674248578391552,
      "grad_norm": 0.9723345888414255,
      "learning_rate": 7.677698357164431e-05,
      "loss": 0.5745,
      "step": 82
    },
    {
      "epoch": 1.0804224207961006,
      "grad_norm": 0.9060339939969984,
      "learning_rate": 7.663093306620231e-05,
      "loss": 0.5768,
      "step": 83
    },
    {
      "epoch": 1.0934199837530463,
      "grad_norm": 0.6635148814822602,
      "learning_rate": 7.648179160733883e-05,
      "loss": 0.5735,
      "step": 84
    },
    {
      "epoch": 1.106417546709992,
      "grad_norm": 0.626645192938145,
      "learning_rate": 7.632957177975387e-05,
      "loss": 0.5774,
      "step": 85
    },
    {
      "epoch": 1.1194151096669374,
      "grad_norm": 0.5880549876089324,
      "learning_rate": 7.61742864279031e-05,
      "loss": 0.5806,
      "step": 86
    },
    {
      "epoch": 1.132412672623883,
      "grad_norm": 0.7256826177123274,
      "learning_rate": 7.601594865491414e-05,
      "loss": 0.5725,
      "step": 87
    },
    {
      "epoch": 1.1454102355808287,
      "grad_norm": 1.0226725048413887,
      "learning_rate": 7.585457182148081e-05,
      "loss": 0.5766,
      "step": 88
    },
    {
      "epoch": 1.158407798537774,
      "grad_norm": 1.053827253244498,
      "learning_rate": 7.569016954473577e-05,
      "loss": 0.5781,
      "step": 89
    },
    {
      "epoch": 1.1714053614947197,
      "grad_norm": 0.9313744365954294,
      "learning_rate": 7.552275569710152e-05,
      "loss": 0.5826,
      "step": 90
    },
    {
      "epoch": 1.1844029244516654,
      "grad_norm": 1.054632648598746,
      "learning_rate": 7.535234440511979e-05,
      "loss": 0.581,
      "step": 91
    },
    {
      "epoch": 1.1974004874086108,
      "grad_norm": 0.8836759025467023,
      "learning_rate": 7.517895004825956e-05,
      "loss": 0.5821,
      "step": 92
    },
    {
      "epoch": 1.2103980503655565,
      "grad_norm": 1.127228102294561,
      "learning_rate": 7.500258725770375e-05,
      "loss": 0.5761,
      "step": 93
    },
    {
      "epoch": 1.2233956133225021,
      "grad_norm": 0.8941981850633482,
      "learning_rate": 7.48232709151145e-05,
      "loss": 0.5695,
      "step": 94
    },
    {
      "epoch": 1.2363931762794476,
      "grad_norm": 0.820917113713941,
      "learning_rate": 7.464101615137756e-05,
      "loss": 0.5805,
      "step": 95
    },
    {
      "epoch": 1.2493907392363932,
      "grad_norm": 0.9174411331061607,
      "learning_rate": 7.445583834532546e-05,
      "loss": 0.5649,
      "step": 96
    },
    {
      "epoch": 1.2623883021933389,
      "grad_norm": 0.6500765370181957,
      "learning_rate": 7.426775312243986e-05,
      "loss": 0.5758,
      "step": 97
    },
    {
      "epoch": 1.2753858651502843,
      "grad_norm": 0.763425847678293,
      "learning_rate": 7.407677635353308e-05,
      "loss": 0.574,
      "step": 98
    },
    {
      "epoch": 1.28838342810723,
      "grad_norm": 0.8352567885058773,
      "learning_rate": 7.388292415340888e-05,
      "loss": 0.5685,
      "step": 99
    },
    {
      "epoch": 1.3013809910641756,
      "grad_norm": 0.4826298499702842,
      "learning_rate": 7.368621287950264e-05,
      "loss": 0.5645,
      "step": 100
    },
    {
      "epoch": 1.314378554021121,
      "grad_norm": 0.5903830880665961,
      "learning_rate": 7.348665913050115e-05,
      "loss": 0.5602,
      "step": 101
    },
    {
      "epoch": 1.3273761169780667,
      "grad_norm": 0.790277966015901,
      "learning_rate": 7.328427974494201e-05,
      "loss": 0.5676,
      "step": 102
    },
    {
      "epoch": 1.340373679935012,
      "grad_norm": 0.7512617541518178,
      "learning_rate": 7.307909179979274e-05,
      "loss": 0.5648,
      "step": 103
    },
    {
      "epoch": 1.3533712428919578,
      "grad_norm": 0.5851295705571637,
      "learning_rate": 7.28711126090098e-05,
      "loss": 0.5651,
      "step": 104
    },
    {
      "epoch": 1.3663688058489034,
      "grad_norm": 0.41913986382054075,
      "learning_rate": 7.266035972207773e-05,
      "loss": 0.5572,
      "step": 105
    },
    {
      "epoch": 1.3793663688058488,
      "grad_norm": 0.3907177567251715,
      "learning_rate": 7.24468509225281e-05,
      "loss": 0.5689,
      "step": 106
    },
    {
      "epoch": 1.3923639317627945,
      "grad_norm": 0.510204926248653,
      "learning_rate": 7.223060422643914e-05,
      "loss": 0.5685,
      "step": 107
    },
    {
      "epoch": 1.40536149471974,
      "grad_norm": 0.4592923316364611,
      "learning_rate": 7.201163788091536e-05,
      "loss": 0.5669,
      "step": 108
    },
    {
      "epoch": 1.4183590576766856,
      "grad_norm": 0.6513129221318326,
      "learning_rate": 7.178997036254799e-05,
      "loss": 0.5681,
      "step": 109
    },
    {
      "epoch": 1.4313566206336312,
      "grad_norm": 0.7772542110813966,
      "learning_rate": 7.156562037585576e-05,
      "loss": 0.5661,
      "step": 110
    },
    {
      "epoch": 1.4443541835905767,
      "grad_norm": 0.6513876710701023,
      "learning_rate": 7.133860685170665e-05,
      "loss": 0.5736,
      "step": 111
    },
    {
      "epoch": 1.4573517465475223,
      "grad_norm": 0.576315717747176,
      "learning_rate": 7.110894894572056e-05,
      "loss": 0.5641,
      "step": 112
    },
    {
      "epoch": 1.470349309504468,
      "grad_norm": 0.5234871451279655,
      "learning_rate": 7.087666603665284e-05,
      "loss": 0.5606,
      "step": 113
    },
    {
      "epoch": 1.4833468724614134,
      "grad_norm": 0.5433209657397478,
      "learning_rate": 7.064177772475912e-05,
      "loss": 0.5677,
      "step": 114
    },
    {
      "epoch": 1.496344435418359,
      "grad_norm": 0.5010015862310137,
      "learning_rate": 7.040430383014146e-05,
      "loss": 0.5653,
      "step": 115
    },
    {
      "epoch": 1.5093419983753047,
      "grad_norm": 0.48832775679905743,
      "learning_rate": 7.016426439107586e-05,
      "loss": 0.5644,
      "step": 116
    },
    {
      "epoch": 1.5223395613322501,
      "grad_norm": 0.6320240467713527,
      "learning_rate": 6.992167966232143e-05,
      "loss": 0.5567,
      "step": 117
    },
    {
      "epoch": 1.5353371242891958,
      "grad_norm": 0.6482838878203654,
      "learning_rate": 6.967657011341126e-05,
      "loss": 0.5749,
      "step": 118
    },
    {
      "epoch": 1.5483346872461414,
      "grad_norm": 0.4549530011344001,
      "learning_rate": 6.942895642692527e-05,
      "loss": 0.561,
      "step": 119
    },
    {
      "epoch": 1.5613322502030869,
      "grad_norm": 0.32795847922028915,
      "learning_rate": 6.917885949674483e-05,
      "loss": 0.565,
      "step": 120
    },
    {
      "epoch": 1.5743298131600325,
      "grad_norm": 0.4131817811198266,
      "learning_rate": 6.892630042628988e-05,
      "loss": 0.5559,
      "step": 121
    },
    {
      "epoch": 1.5873273761169782,
      "grad_norm": 0.34923043241455753,
      "learning_rate": 6.867130052673806e-05,
      "loss": 0.5627,
      "step": 122
    },
    {
      "epoch": 1.6003249390739236,
      "grad_norm": 0.4012122766550806,
      "learning_rate": 6.841388131522656e-05,
      "loss": 0.561,
      "step": 123
    },
    {
      "epoch": 1.6133225020308692,
      "grad_norm": 0.45749696954329366,
      "learning_rate": 6.815406451303647e-05,
      "loss": 0.5619,
      "step": 124
    },
    {
      "epoch": 1.626320064987815,
      "grad_norm": 0.3572180927283862,
      "learning_rate": 6.789187204375981e-05,
      "loss": 0.5637,
      "step": 125
    },
    {
      "epoch": 1.6393176279447603,
      "grad_norm": 0.37728781845068143,
      "learning_rate": 6.762732603144978e-05,
      "loss": 0.5605,
      "step": 126
    },
    {
      "epoch": 1.652315190901706,
      "grad_norm": 0.4270428246291779,
      "learning_rate": 6.736044879875373e-05,
      "loss": 0.5536,
      "step": 127
    },
    {
      "epoch": 1.6653127538586516,
      "grad_norm": 0.299705291536536,
      "learning_rate": 6.709126286502965e-05,
      "loss": 0.5623,
      "step": 128
    },
    {
      "epoch": 1.678310316815597,
      "grad_norm": 0.36624782812777956,
      "learning_rate": 6.681979094444596e-05,
      "loss": 0.5569,
      "step": 129
    },
    {
      "epoch": 1.6913078797725425,
      "grad_norm": 0.33387655040953185,
      "learning_rate": 6.654605594406486e-05,
      "loss": 0.5575,
      "step": 130
    },
    {
      "epoch": 1.7043054427294884,
      "grad_norm": 0.38850086520685223,
      "learning_rate": 6.627008096190938e-05,
      "loss": 0.5573,
      "step": 131
    },
    {
      "epoch": 1.7173030056864338,
      "grad_norm": 0.412039261196286,
      "learning_rate": 6.59918892850144e-05,
      "loss": 0.5598,
      "step": 132
    },
    {
      "epoch": 1.7303005686433792,
      "grad_norm": 0.4639064578164096,
      "learning_rate": 6.571150438746157e-05,
      "loss": 0.5573,
      "step": 133
    },
    {
      "epoch": 1.743298131600325,
      "grad_norm": 0.569316518837733,
      "learning_rate": 6.542894992839873e-05,
      "loss": 0.5525,
      "step": 134
    },
    {
      "epoch": 1.7562956945572705,
      "grad_norm": 0.7768980117231334,
      "learning_rate": 6.514424975004329e-05,
      "loss": 0.5569,
      "step": 135
    },
    {
      "epoch": 1.769293257514216,
      "grad_norm": 0.8690174213866712,
      "learning_rate": 6.48574278756706e-05,
      "loss": 0.5581,
      "step": 136
    },
    {
      "epoch": 1.7822908204711616,
      "grad_norm": 0.7189834805856271,
      "learning_rate": 6.456850850758673e-05,
      "loss": 0.5523,
      "step": 137
    },
    {
      "epoch": 1.7952883834281073,
      "grad_norm": 0.42461801914103414,
      "learning_rate": 6.427751602508628e-05,
      "loss": 0.5513,
      "step": 138
    },
    {
      "epoch": 1.8082859463850527,
      "grad_norm": 0.3638089421301393,
      "learning_rate": 6.398447498239527e-05,
      "loss": 0.5597,
      "step": 139
    },
    {
      "epoch": 1.8212835093419983,
      "grad_norm": 0.4699533771325944,
      "learning_rate": 6.368941010659921e-05,
      "loss": 0.5582,
      "step": 140
    },
    {
      "epoch": 1.834281072298944,
      "grad_norm": 0.4419813323602383,
      "learning_rate": 6.339234629555655e-05,
      "loss": 0.5561,
      "step": 141
    },
    {
      "epoch": 1.8472786352558894,
      "grad_norm": 0.36271241045794583,
      "learning_rate": 6.309330861579786e-05,
      "loss": 0.5584,
      "step": 142
    },
    {
      "epoch": 1.860276198212835,
      "grad_norm": 0.40965930158743397,
      "learning_rate": 6.279232230041065e-05,
      "loss": 0.5536,
      "step": 143
    },
    {
      "epoch": 1.8732737611697807,
      "grad_norm": 0.40198532643300333,
      "learning_rate": 6.248941274691017e-05,
      "loss": 0.5511,
      "step": 144
    },
    {
      "epoch": 1.8862713241267262,
      "grad_norm": 0.46299430742363373,
      "learning_rate": 6.218460551509636e-05,
      "loss": 0.5471,
      "step": 145
    },
    {
      "epoch": 1.8992688870836718,
      "grad_norm": 0.5215127675687683,
      "learning_rate": 6.18779263248971e-05,
      "loss": 0.5553,
      "step": 146
    },
    {
      "epoch": 1.9122664500406175,
      "grad_norm": 0.45729570755510884,
      "learning_rate": 6.156940105419785e-05,
      "loss": 0.5491,
      "step": 147
    },
    {
      "epoch": 1.925264012997563,
      "grad_norm": 0.3015515408877481,
      "learning_rate": 6.125905573665824e-05,
      "loss": 0.5512,
      "step": 148
    },
    {
      "epoch": 1.9382615759545085,
      "grad_norm": 0.4496510050196559,
      "learning_rate": 6.094691655951512e-05,
      "loss": 0.5537,
      "step": 149
    },
    {
      "epoch": 1.9512591389114542,
      "grad_norm": 0.5782505006225216,
      "learning_rate": 6.063300986137297e-05,
      "loss": 0.5519,
      "step": 150
    },
    {
      "epoch": 1.9642567018683996,
      "grad_norm": 0.48811843212065303,
      "learning_rate": 6.0317362129981375e-05,
      "loss": 0.5519,
      "step": 151
    },
    {
      "epoch": 1.9772542648253453,
      "grad_norm": 0.3524549635448169,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.5555,
      "step": 152
    },
    {
      "epoch": 1.990251827782291,
      "grad_norm": 0.3499895937576908,
      "learning_rate": 5.968095025075114e-05,
      "loss": 0.5571,
      "step": 153
    },
    {
      "epoch": 2.0048740861088548,
      "grad_norm": 0.354088344538403,
      "learning_rate": 5.936023980395997e-05,
      "loss": 0.5401,
      "step": 154
    },
    {
      "epoch": 2.0178716490658,
      "grad_norm": 0.4330890417017954,
      "learning_rate": 5.903789572148295e-05,
      "loss": 0.5114,
      "step": 155
    },
    {
      "epoch": 2.0308692120227456,
      "grad_norm": 0.45299756805326713,
      "learning_rate": 5.871394520302432e-05,
      "loss": 0.5027,
      "step": 156
    },
    {
      "epoch": 2.0438667749796915,
      "grad_norm": 0.36185553048035746,
      "learning_rate": 5.838841558384091e-05,
      "loss": 0.5004,
      "step": 157
    },
    {
      "epoch": 2.056864337936637,
      "grad_norm": 0.4094243535630425,
      "learning_rate": 5.806133433243558e-05,
      "loss": 0.5089,
      "step": 158
    },
    {
      "epoch": 2.0698619008935824,
      "grad_norm": 0.4119895854921856,
      "learning_rate": 5.7732729048239444e-05,
      "loss": 0.5041,
      "step": 159
    },
    {
      "epoch": 2.0828594638505282,
      "grad_norm": 0.49951764317825964,
      "learning_rate": 5.740262745928293e-05,
      "loss": 0.5083,
      "step": 160
    },
    {
      "epoch": 2.0958570268074737,
      "grad_norm": 0.6492588353532962,
      "learning_rate": 5.707105741985615e-05,
      "loss": 0.5089,
      "step": 161
    },
    {
      "epoch": 2.108854589764419,
      "grad_norm": 0.8172546815280193,
      "learning_rate": 5.673804690815845e-05,
      "loss": 0.5125,
      "step": 162
    },
    {
      "epoch": 2.121852152721365,
      "grad_norm": 0.8291933191610376,
      "learning_rate": 5.6403624023937614e-05,
      "loss": 0.5042,
      "step": 163
    },
    {
      "epoch": 2.1348497156783104,
      "grad_norm": 0.6400143821799831,
      "learning_rate": 5.606781698611879e-05,
      "loss": 0.5019,
      "step": 164
    },
    {
      "epoch": 2.147847278635256,
      "grad_norm": 0.5436979640183732,
      "learning_rate": 5.573065413042333e-05,
      "loss": 0.5045,
      "step": 165
    },
    {
      "epoch": 2.1608448415922012,
      "grad_norm": 0.4492438740170881,
      "learning_rate": 5.5392163906977835e-05,
      "loss": 0.5067,
      "step": 166
    },
    {
      "epoch": 2.173842404549147,
      "grad_norm": 0.40317590644042245,
      "learning_rate": 5.505237487791343e-05,
      "loss": 0.5041,
      "step": 167
    },
    {
      "epoch": 2.1868399675060926,
      "grad_norm": 0.4137259663946232,
      "learning_rate": 5.471131571495574e-05,
      "loss": 0.5008,
      "step": 168
    },
    {
      "epoch": 2.199837530463038,
      "grad_norm": 0.39271569786675564,
      "learning_rate": 5.4369015197005506e-05,
      "loss": 0.5109,
      "step": 169
    },
    {
      "epoch": 2.212835093419984,
      "grad_norm": 0.3609357892178772,
      "learning_rate": 5.4025502207710184e-05,
      "loss": 0.5056,
      "step": 170
    },
    {
      "epoch": 2.2258326563769293,
      "grad_norm": 0.32273394848970144,
      "learning_rate": 5.368080573302676e-05,
      "loss": 0.5004,
      "step": 171
    },
    {
      "epoch": 2.2388302193338747,
      "grad_norm": 0.3902918387294514,
      "learning_rate": 5.333495485877583e-05,
      "loss": 0.5015,
      "step": 172
    },
    {
      "epoch": 2.2518277822908206,
      "grad_norm": 0.3615272576113914,
      "learning_rate": 5.298797876818735e-05,
      "loss": 0.501,
      "step": 173
    },
    {
      "epoch": 2.264825345247766,
      "grad_norm": 0.3092837009560793,
      "learning_rate": 5.263990673943811e-05,
      "loss": 0.5013,
      "step": 174
    },
    {
      "epoch": 2.2778229082047114,
      "grad_norm": 0.3221337815028307,
      "learning_rate": 5.229076814318122e-05,
      "loss": 0.5023,
      "step": 175
    },
    {
      "epoch": 2.2908204711616573,
      "grad_norm": 0.31876856314125945,
      "learning_rate": 5.194059244006779e-05,
      "loss": 0.4954,
      "step": 176
    },
    {
      "epoch": 2.3038180341186028,
      "grad_norm": 0.3471396479864812,
      "learning_rate": 5.158940917826099e-05,
      "loss": 0.5041,
      "step": 177
    },
    {
      "epoch": 2.316815597075548,
      "grad_norm": 0.30244403952200244,
      "learning_rate": 5.123724799094279e-05,
      "loss": 0.4968,
      "step": 178
    },
    {
      "epoch": 2.329813160032494,
      "grad_norm": 0.27081780367373653,
      "learning_rate": 5.088413859381341e-05,
      "loss": 0.495,
      "step": 179
    },
    {
      "epoch": 2.3428107229894395,
      "grad_norm": 0.26698926546704715,
      "learning_rate": 5.053011078258397e-05,
      "loss": 0.4986,
      "step": 180
    },
    {
      "epoch": 2.355808285946385,
      "grad_norm": 0.2434254540157736,
      "learning_rate": 5.017519443046226e-05,
      "loss": 0.5059,
      "step": 181
    },
    {
      "epoch": 2.368805848903331,
      "grad_norm": 0.2649836381754834,
      "learning_rate": 4.981941948563197e-05,
      "loss": 0.4939,
      "step": 182
    },
    {
      "epoch": 2.381803411860276,
      "grad_norm": 0.24587524499861643,
      "learning_rate": 4.94628159687257e-05,
      "loss": 0.4955,
      "step": 183
    },
    {
      "epoch": 2.3948009748172217,
      "grad_norm": 0.2099952601611583,
      "learning_rate": 4.9105413970291747e-05,
      "loss": 0.4998,
      "step": 184
    },
    {
      "epoch": 2.4077985377741675,
      "grad_norm": 0.30733517181039643,
      "learning_rate": 4.874724364825504e-05,
      "loss": 0.5051,
      "step": 185
    },
    {
      "epoch": 2.420796100731113,
      "grad_norm": 0.23936641127353223,
      "learning_rate": 4.8388335225372416e-05,
      "loss": 0.5042,
      "step": 186
    },
    {
      "epoch": 2.4337936636880584,
      "grad_norm": 0.24807141319926213,
      "learning_rate": 4.802871898668237e-05,
      "loss": 0.5015,
      "step": 187
    },
    {
      "epoch": 2.4467912266450043,
      "grad_norm": 0.23764312316041677,
      "learning_rate": 4.7668425276949546e-05,
      "loss": 0.5047,
      "step": 188
    },
    {
      "epoch": 2.4597887896019497,
      "grad_norm": 0.19839288524791437,
      "learning_rate": 4.730748449810429e-05,
      "loss": 0.4945,
      "step": 189
    },
    {
      "epoch": 2.472786352558895,
      "grad_norm": 0.21343457304874502,
      "learning_rate": 4.694592710667723e-05,
      "loss": 0.4948,
      "step": 190
    },
    {
      "epoch": 2.4857839155158405,
      "grad_norm": 0.23293583325724027,
      "learning_rate": 4.658378361122936e-05,
      "loss": 0.4986,
      "step": 191
    },
    {
      "epoch": 2.4987814784727864,
      "grad_norm": 0.21561162546636445,
      "learning_rate": 4.622108456977773,
| "loss": 0.5027, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 2.511779041429732, | |
| "grad_norm": 0.20220078631632762, | |
| "learning_rate": 4.585786058721687e-05, | |
| "loss": 0.4992, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 2.5247766043866777, | |
| "grad_norm": 0.19015117259585296, | |
| "learning_rate": 4.549414231273633e-05, | |
| "loss": 0.5019, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 2.537774167343623, | |
| "grad_norm": 0.17888404685442738, | |
| "learning_rate": 4.512996043723453e-05, | |
| "loss": 0.5009, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 2.5507717303005686, | |
| "grad_norm": 0.1891627608715842, | |
| "learning_rate": 4.476534569072895e-05, | |
| "loss": 0.5005, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 2.563769293257514, | |
| "grad_norm": 0.1960672578073265, | |
| "learning_rate": 4.440032883976318e-05, | |
| "loss": 0.4979, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 2.57676685621446, | |
| "grad_norm": 0.1907240431557997, | |
| "learning_rate": 4.403494068481074e-05, | |
| "loss": 0.4965, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 2.5897644191714053, | |
| "grad_norm": 0.21281767640684754, | |
| "learning_rate": 4.3669212057676145e-05, | |
| "loss": 0.5035, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 2.602761982128351, | |
| "grad_norm": 0.20034978185083457, | |
| "learning_rate": 4.33031738188933e-05, | |
| "loss": 0.493, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 2.6157595450852966, | |
| "grad_norm": 0.18482405886366973, | |
| "learning_rate": 4.293685685512142e-05, | |
| "loss": 0.4979, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 2.628757108042242, | |
| "grad_norm": 0.20689788018303523, | |
| "learning_rate": 4.257029207653881e-05, | |
| "loss": 0.4968, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 2.6417546709991875, | |
| "grad_norm": 0.20468709904511792, | |
| "learning_rate": 4.220351041423462e-05, | |
| "loss": 0.496, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 2.6547522339561334, | |
| "grad_norm": 0.19708900779722158, | |
| "learning_rate": 4.183654281759888e-05, | |
| "loss": 0.5059, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 2.667749796913079, | |
| "grad_norm": 0.1694897534384111, | |
| "learning_rate": 4.1469420251710905e-05, | |
| "loss": 0.5001, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 2.680747359870024, | |
| "grad_norm": 0.2060507236034366, | |
| "learning_rate": 4.110217369472649e-05, | |
| "loss": 0.5042, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 2.69374492282697, | |
| "grad_norm": 0.21981117761095914, | |
| "learning_rate": 4.07348341352639e-05, | |
| "loss": 0.4925, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 2.7067424857839155, | |
| "grad_norm": 0.15869661704467267, | |
| "learning_rate": 4.0367432569789065e-05, | |
| "loss": 0.4925, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 2.719740048740861, | |
| "grad_norm": 0.17554453102907516, | |
| "learning_rate": 4e-05, | |
| "loss": 0.4898, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 2.732737611697807, | |
| "grad_norm": 0.20109162996025604, | |
| "learning_rate": 3.963256743021095e-05, | |
| "loss": 0.4935, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 2.7457351746547523, | |
| "grad_norm": 0.16965164550202852, | |
| "learning_rate": 3.92651658647361e-05, | |
| "loss": 0.4926, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 2.7587327376116977, | |
| "grad_norm": 0.19107787557510944, | |
| "learning_rate": 3.889782630527353e-05, | |
| "loss": 0.5003, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 2.7717303005686436, | |
| "grad_norm": 0.23600064349601493, | |
| "learning_rate": 3.853057974828911e-05, | |
| "loss": 0.5025, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 2.784727863525589, | |
| "grad_norm": 0.1938736742926907, | |
| "learning_rate": 3.816345718240113e-05, | |
| "loss": 0.496, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 2.7977254264825344, | |
| "grad_norm": 0.18674334250731434, | |
| "learning_rate": 3.779648958576538e-05, | |
| "loss": 0.4964, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 2.81072298943948, | |
| "grad_norm": 0.17897442021815202, | |
| "learning_rate": 3.74297079234612e-05, | |
| "loss": 0.4993, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 2.8237205523964257, | |
| "grad_norm": 0.1454966971656852, | |
| "learning_rate": 3.706314314487859e-05, | |
| "loss": 0.5024, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 2.836718115353371, | |
| "grad_norm": 0.17254506937932373, | |
| "learning_rate": 3.669682618110671e-05, | |
| "loss": 0.496, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 2.849715678310317, | |
| "grad_norm": 0.17898500506822662, | |
| "learning_rate": 3.6330787942323855e-05, | |
| "loss": 0.4971, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 2.8627132412672625, | |
| "grad_norm": 0.1775497414815859, | |
| "learning_rate": 3.5965059315189274e-05, | |
| "loss": 0.4994, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 2.875710804224208, | |
| "grad_norm": 0.15173916947706884, | |
| "learning_rate": 3.559967116023683e-05, | |
| "loss": 0.501, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 2.8887083671811533, | |
| "grad_norm": 0.2019676520697475, | |
| "learning_rate": 3.523465430927106e-05, | |
| "loss": 0.4975, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 2.901705930138099, | |
| "grad_norm": 0.17820583073365817, | |
| "learning_rate": 3.4870039562765475e-05, | |
| "loss": 0.497, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 2.9147034930950446, | |
| "grad_norm": 0.17230973288348897, | |
| "learning_rate": 3.4505857687263675e-05, | |
| "loss": 0.4918, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 2.9277010560519905, | |
| "grad_norm": 0.17781379922544957, | |
| "learning_rate": 3.414213941278314e-05, | |
| "loss": 0.5009, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 2.940698619008936, | |
| "grad_norm": 0.17434301641204888, | |
| "learning_rate": 3.377891543022229e-05, | |
| "loss": 0.4967, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 2.9536961819658814, | |
| "grad_norm": 0.15878938609241405, | |
| "learning_rate": 3.341621638877064e-05, | |
| "loss": 0.5022, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 2.966693744922827, | |
| "grad_norm": 0.1877350498093743, | |
| "learning_rate": 3.305407289332279e-05, | |
| "loss": 0.4943, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 2.9796913078797727, | |
| "grad_norm": 0.16153562862339216, | |
| "learning_rate": 3.269251550189573e-05, | |
| "loss": 0.4952, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 2.992688870836718, | |
| "grad_norm": 0.1636698530309882, | |
| "learning_rate": 3.2331574723050474e-05, | |
| "loss": 0.5036, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 3.007311129163282, | |
| "grad_norm": 0.24083959413053807, | |
| "learning_rate": 3.197128101331764e-05, | |
| "loss": 0.4787, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 3.0203086921202273, | |
| "grad_norm": 0.20882743227454145, | |
| "learning_rate": 3.161166477462759e-05, | |
| "loss": 0.4527, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 3.033306255077173, | |
| "grad_norm": 0.2562598902235666, | |
| "learning_rate": 3.125275635174497e-05, | |
| "loss": 0.4566, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 3.0463038180341186, | |
| "grad_norm": 0.29932911926131295, | |
| "learning_rate": 3.089458602970828e-05, | |
| "loss": 0.4492, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 3.059301380991064, | |
| "grad_norm": 0.22296216539718972, | |
| "learning_rate": 3.0537184031274306e-05, | |
| "loss": 0.452, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 3.07229894394801, | |
| "grad_norm": 0.265839442067331, | |
| "learning_rate": 3.0180580514368037e-05, | |
| "loss": 0.4591, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 3.0852965069049554, | |
| "grad_norm": 0.26317399079157505, | |
| "learning_rate": 2.9824805569537747e-05, | |
| "loss": 0.4485, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 3.098294069861901, | |
| "grad_norm": 0.21663346321522212, | |
| "learning_rate": 2.9469889217416045e-05, | |
| "loss": 0.4556, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 3.1112916328188467, | |
| "grad_norm": 0.2650899931767454, | |
| "learning_rate": 2.9115861406186593e-05, | |
| "loss": 0.4537, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 3.124289195775792, | |
| "grad_norm": 0.2506262975062407, | |
| "learning_rate": 2.8762752009057232e-05, | |
| "loss": 0.4535, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 3.1372867587327375, | |
| "grad_norm": 0.22119457110883312, | |
| "learning_rate": 2.841059082173902e-05, | |
| "loss": 0.4502, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 3.150284321689683, | |
| "grad_norm": 0.23862545493558096, | |
| "learning_rate": 2.805940755993223e-05, | |
| "loss": 0.4537, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 3.163281884646629, | |
| "grad_norm": 0.18800051564766848, | |
| "learning_rate": 2.770923185681878e-05, | |
| "loss": 0.4534, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 3.1762794476035743, | |
| "grad_norm": 0.21073908239980146, | |
| "learning_rate": 2.7360093260561904e-05, | |
| "loss": 0.4537, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 3.1892770105605197, | |
| "grad_norm": 0.19564030664878582, | |
| "learning_rate": 2.7012021231812666e-05, | |
| "loss": 0.4533, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 3.2022745735174656, | |
| "grad_norm": 0.1689855239948926, | |
| "learning_rate": 2.6665045141224193e-05, | |
| "loss": 0.451, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 3.215272136474411, | |
| "grad_norm": 0.20951707110954032, | |
| "learning_rate": 2.6319194266973256e-05, | |
| "loss": 0.4557, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 3.2282696994313564, | |
| "grad_norm": 0.16999772257768134, | |
| "learning_rate": 2.597449779228983e-05, | |
| "loss": 0.4468, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 3.2412672623883023, | |
| "grad_norm": 0.19094834892704782, | |
| "learning_rate": 2.563098480299451e-05, | |
| "loss": 0.4554, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 3.2542648253452477, | |
| "grad_norm": 0.16246873328527864, | |
| "learning_rate": 2.5288684285044283e-05, | |
| "loss": 0.4549, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 3.267262388302193, | |
| "grad_norm": 0.1825793103224904, | |
| "learning_rate": 2.4947625122086585e-05, | |
| "loss": 0.4543, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 3.280259951259139, | |
| "grad_norm": 0.15892372670398716, | |
| "learning_rate": 2.460783609302218e-05, | |
| "loss": 0.4575, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 3.2932575142160845, | |
| "grad_norm": 0.17673710311378757, | |
| "learning_rate": 2.4269345869576676e-05, | |
| "loss": 0.452, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 3.30625507717303, | |
| "grad_norm": 0.15259667013543174, | |
| "learning_rate": 2.393218301388123e-05, | |
| "loss": 0.4507, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 3.319252640129976, | |
| "grad_norm": 0.15618013871442282, | |
| "learning_rate": 2.35963759760624e-05, | |
| "loss": 0.4483, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 3.332250203086921, | |
| "grad_norm": 0.1383646033070645, | |
| "learning_rate": 2.3261953091841553e-05, | |
| "loss": 0.4466, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 3.3452477660438666, | |
| "grad_norm": 0.15025307639697422, | |
| "learning_rate": 2.2928942580143855e-05, | |
| "loss": 0.457, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 3.3582453290008125, | |
| "grad_norm": 0.12942060560157578, | |
| "learning_rate": 2.2597372540717083e-05, | |
| "loss": 0.4494, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 3.371242891957758, | |
| "grad_norm": 0.13761592476330478, | |
| "learning_rate": 2.226727095176057e-05, | |
| "loss": 0.4577, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 3.3842404549147034, | |
| "grad_norm": 0.12368831090048717, | |
| "learning_rate": 2.1938665667564435e-05, | |
| "loss": 0.45, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 3.3972380178716493, | |
| "grad_norm": 0.13980977768107444, | |
| "learning_rate": 2.1611584416159106e-05, | |
| "loss": 0.4601, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 3.4102355808285947, | |
| "grad_norm": 0.1292981782906244, | |
| "learning_rate": 2.1286054796975696e-05, | |
| "loss": 0.4544, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 3.42323314378554, | |
| "grad_norm": 0.13081878013901876, | |
| "learning_rate": 2.096210427851706e-05, | |
| "loss": 0.4499, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 3.436230706742486, | |
| "grad_norm": 0.13234583952511578, | |
| "learning_rate": 2.063976019604006e-05, | |
| "loss": 0.4492, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 3.4492282696994314, | |
| "grad_norm": 0.13707720449565938, | |
| "learning_rate": 2.0319049749248876e-05, | |
| "loss": 0.4576, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 3.462225832656377, | |
| "grad_norm": 0.1275609686612589, | |
| "learning_rate": 2.0000000000000012e-05, | |
| "loss": 0.4478, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 3.4752233956133223, | |
| "grad_norm": 0.13858936276545247, | |
| "learning_rate": 1.9682637870018638e-05, | |
| "loss": 0.4503, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 3.488220958570268, | |
| "grad_norm": 0.13526985297489078, | |
| "learning_rate": 1.9366990138627054e-05, | |
| "loss": 0.451, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 3.5012185215272136, | |
| "grad_norm": 0.13143298791548755, | |
| "learning_rate": 1.9053083440484887e-05, | |
| "loss": 0.4469, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 3.5142160844841595, | |
| "grad_norm": 0.13651218753852146, | |
| "learning_rate": 1.8740944263341773e-05, | |
| "loss": 0.4538, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 3.527213647441105, | |
| "grad_norm": 0.13559937030356303, | |
| "learning_rate": 1.8430598945802156e-05, | |
| "loss": 0.4578, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 3.5402112103980503, | |
| "grad_norm": 0.13345629350619237, | |
| "learning_rate": 1.8122073675102935e-05, | |
| "loss": 0.4501, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 3.5532087733549957, | |
| "grad_norm": 0.13156084596113124, | |
| "learning_rate": 1.781539448490365e-05, | |
| "loss": 0.4508, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 3.5662063363119416, | |
| "grad_norm": 0.12230522417142756, | |
| "learning_rate": 1.7510587253089842e-05, | |
| "loss": 0.4564, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 3.579203899268887, | |
| "grad_norm": 0.12733451660742465, | |
| "learning_rate": 1.7207677699589355e-05, | |
| "loss": 0.4562, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 3.592201462225833, | |
| "grad_norm": 0.13918819865169338, | |
| "learning_rate": 1.690669138420215e-05, | |
| "loss": 0.4539, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 3.6051990251827783, | |
| "grad_norm": 0.12910395860504983, | |
| "learning_rate": 1.6607653704443457e-05, | |
| "loss": 0.4549, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 3.618196588139724, | |
| "grad_norm": 0.11652161500078773, | |
| "learning_rate": 1.6310589893400804e-05, | |
| "loss": 0.4495, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 3.631194151096669, | |
| "grad_norm": 0.1435144989657529, | |
| "learning_rate": 1.601552501760473e-05, | |
| "loss": 0.4615, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 3.644191714053615, | |
| "grad_norm": 0.11768536430982104, | |
| "learning_rate": 1.5722483974913737e-05, | |
| "loss": 0.452, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 3.6571892770105605, | |
| "grad_norm": 0.12124441944302719, | |
| "learning_rate": 1.5431491492413288e-05, | |
| "loss": 0.4509, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 3.670186839967506, | |
| "grad_norm": 0.11219311820617218, | |
| "learning_rate": 1.5142572124329418e-05, | |
| "loss": 0.4536, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 3.683184402924452, | |
| "grad_norm": 0.11088572665376577, | |
| "learning_rate": 1.4855750249956718e-05, | |
| "loss": 0.4533, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 3.6961819658813972, | |
| "grad_norm": 0.1114181991679099, | |
| "learning_rate": 1.457105007160129e-05, | |
| "loss": 0.4583, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 3.7091795288383427, | |
| "grad_norm": 0.1150092625579184, | |
| "learning_rate": 1.4288495612538427e-05, | |
| "loss": 0.4546, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 3.722177091795288, | |
| "grad_norm": 0.11256641012147124, | |
| "learning_rate": 1.4008110714985623e-05, | |
| "loss": 0.4562, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 3.735174654752234, | |
| "grad_norm": 0.1121146716270174, | |
| "learning_rate": 1.3729919038090627e-05, | |
| "loss": 0.4531, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 3.7481722177091794, | |
| "grad_norm": 0.12240062452155943, | |
| "learning_rate": 1.3453944055935151e-05, | |
| "loss": 0.4554, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 3.7611697806661253, | |
| "grad_norm": 0.10662890450993669, | |
| "learning_rate": 1.3180209055554043e-05, | |
| "loss": 0.4557, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 3.7741673436230707, | |
| "grad_norm": 0.11472028819426605, | |
| "learning_rate": 1.2908737134970367e-05, | |
| "loss": 0.4568, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 3.787164906580016, | |
| "grad_norm": 0.11298609292901812, | |
| "learning_rate": 1.2639551201246278e-05, | |
| "loss": 0.4514, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 3.8001624695369616, | |
| "grad_norm": 0.10976020682705631, | |
| "learning_rate": 1.2372673968550229e-05, | |
| "loss": 0.4476, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 3.8131600324939074, | |
| "grad_norm": 0.10531475963763727, | |
| "learning_rate": 1.2108127956240186e-05, | |
| "loss": 0.4471, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 3.826157595450853, | |
| "grad_norm": 0.10623689348961035, | |
| "learning_rate": 1.1845935486963546e-05, | |
| "loss": 0.4515, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 3.8391551584077988, | |
| "grad_norm": 0.10834285056205656, | |
| "learning_rate": 1.158611868477344e-05, | |
| "loss": 0.4484, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 3.852152721364744, | |
| "grad_norm": 0.10958894542102338, | |
| "learning_rate": 1.1328699473261957e-05, | |
| "loss": 0.446, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 3.8651502843216896, | |
| "grad_norm": 0.10719668132712344, | |
| "learning_rate": 1.107369957371013e-05, | |
| "loss": 0.4587, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 3.878147847278635, | |
| "grad_norm": 0.10865202402459116, | |
| "learning_rate": 1.0821140503255174e-05, | |
| "loss": 0.4478, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 3.891145410235581, | |
| "grad_norm": 0.1075674174283554, | |
| "learning_rate": 1.0571043573074737e-05, | |
| "loss": 0.4474, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 3.9041429731925263, | |
| "grad_norm": 0.10420773620004607, | |
| "learning_rate": 1.0323429886588743e-05, | |
| "loss": 0.4558, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 3.917140536149472, | |
| "grad_norm": 0.10602592166217871, | |
| "learning_rate": 1.0078320337678584e-05, | |
| "loss": 0.4472, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 3.9301380991064176, | |
| "grad_norm": 0.11214585082716343, | |
| "learning_rate": 9.835735608924155e-06, | |
| "loss": 0.4532, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 3.943135662063363, | |
| "grad_norm": 0.10680829021211037, | |
| "learning_rate": 9.595696169858542e-06, | |
| "loss": 0.452, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 3.9561332250203085, | |
| "grad_norm": 0.10193849061969358, | |
| "learning_rate": 9.358222275240884e-06, | |
| "loss": 0.4548, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 3.9691307879772544, | |
| "grad_norm": 0.10621117615673921, | |
| "learning_rate": 9.123333963347166e-06, | |
| "loss": 0.4568, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 3.9821283509342, | |
| "grad_norm": 0.10611037774239646, | |
| "learning_rate": 8.89105105427945e-06, | |
| "loss": 0.4503, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 3.9951259138911457, | |
| "grad_norm": 0.10224681565036052, | |
| "learning_rate": 8.661393148293355e-06, | |
| "loss": 0.4514, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 4.0097481722177095, | |
| "grad_norm": 0.1629992310965872, | |
| "learning_rate": 8.434379624144261e-06, | |
| "loss": 0.4384, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 4.022745735174655, | |
| "grad_norm": 0.16305297766108903, | |
| "learning_rate": 8.210029637452016e-06, | |
| "loss": 0.4219, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 4.0357432981316, | |
| "grad_norm": 0.1152257478387255, | |
| "learning_rate": 7.988362119084642e-06, | |
| "loss": 0.423, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 4.048740861088546, | |
| "grad_norm": 0.12619317639889938, | |
| "learning_rate": 7.769395773560874e-06, | |
| "loss": 0.4265, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 4.061738424045491, | |
| "grad_norm": 0.1605893684483608, | |
| "learning_rate": 7.553149077471915e-06, | |
| "loss": 0.4293, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 4.074735987002437, | |
| "grad_norm": 0.16787469942852876, | |
| "learning_rate": 7.3396402779222845e-06, | |
| "loss": 0.4269, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 4.087733549959383, | |
| "grad_norm": 0.13639739623886823, | |
| "learning_rate": 7.128887390990198e-06, | |
| "loss": 0.4203, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 4.100731112916328, | |
| "grad_norm": 0.12896797685517175, | |
| "learning_rate": 6.9209082002072725e-06, | |
| "loss": 0.4237, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 4.113728675873274, | |
| "grad_norm": 0.13126789348210258, | |
| "learning_rate": 6.715720255058e-06, | |
| "loss": 0.4323, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 4.126726238830219, | |
| "grad_norm": 0.13882914474015806, | |
| "learning_rate": 6.513340869498859e-06, | |
| "loss": 0.4232, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 4.139723801787165, | |
| "grad_norm": 0.13742788716133153, | |
| "learning_rate": 6.313787120497376e-06, | |
| "loss": 0.427, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 4.15272136474411, | |
| "grad_norm": 0.12658794759546255, | |
| "learning_rate": 6.117075846591123e-06, | |
| "loss": 0.422, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 4.1657189277010565, | |
| "grad_norm": 0.11656720107903619, | |
| "learning_rate": 5.923223646466923e-06, | |
| "loss": 0.424, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 4.178716490658002, | |
| "grad_norm": 0.12017679947679329, | |
| "learning_rate": 5.732246877560146e-06, | |
| "loss": 0.4239, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 4.191714053614947, | |
| "grad_norm": 0.1248953898447142, | |
| "learning_rate": 5.5441616546745646e-06, | |
| "loss": 0.4247, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 4.204711616571893, | |
| "grad_norm": 0.11492392711495635, | |
| "learning_rate": 5.358983848622452e-06, | |
| "loss": 0.423, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 4.217709179528838, | |
| "grad_norm": 0.10979437544809782, | |
| "learning_rate": 5.176729084885508e-06, | |
| "loss": 0.426, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 4.230706742485784, | |
| "grad_norm": 0.10996774248170614, | |
| "learning_rate": 4.99741274229625e-06, | |
| "loss": 0.424, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 4.24370430544273, | |
| "grad_norm": 0.11864930806086794, | |
| "learning_rate": 4.821049951740442e-06, | |
| "loss": 0.4236, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 4.256701868399675, | |
| "grad_norm": 0.11614244217733552, | |
| "learning_rate": 4.647655594880225e-06, | |
| "loss": 0.4264, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 4.269699431356621, | |
| "grad_norm": 0.10489433778288049, | |
| "learning_rate": 4.4772443028985004e-06, | |
| "loss": 0.4306, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 4.282696994313566, | |
| "grad_norm": 0.1045337845950868, | |
| "learning_rate": 4.3098304552642385e-06, | |
| "loss": 0.4279, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 4.295694557270512, | |
| "grad_norm": 0.10425269207053385, | |
| "learning_rate": 4.1454281785191995e-06, | |
| "loss": 0.4219, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 4.308692120227457, | |
| "grad_norm": 0.10799333991055605, | |
| "learning_rate": 3.984051345085855e-06, | |
| "loss": 0.4239, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 4.3216896831844025, | |
| "grad_norm": 0.11546875761746016, | |
| "learning_rate": 3.825713572096903e-06, | |
| "loss": 0.4188, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 4.334687246141349, | |
| "grad_norm": 0.10158288013636599, | |
| "learning_rate": 3.6704282202461515e-06, | |
| "loss": 0.4243, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 4.347684809098294, | |
| "grad_norm": 0.09998970634420433, | |
| "learning_rate": 3.518208392661184e-06, | |
| "loss": 0.4221, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 4.36068237205524, | |
| "grad_norm": 0.1072651386764742, | |
| "learning_rate": 3.3690669337977e-06, | |
| "loss": 0.4204, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 4.373679935012185, | |
| "grad_norm": 0.10189852488748347, | |
| "learning_rate": 3.2230164283556918e-06, | |
| "loss": 0.4218, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 4.3866774979691305, | |
| "grad_norm": 0.09855467315288223, | |
| "learning_rate": 3.080069200217497e-06, | |
| "loss": 0.424, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 4.399675060926076, | |
| "grad_norm": 0.09678471475059303, | |
| "learning_rate": 2.9402373114079295e-06, | |
| "loss": 0.4237, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 4.412672623883022, | |
| "grad_norm": 0.09641330522520603, | |
| "learning_rate": 2.803532561076492e-06, | |
| "loss": 0.4294, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 4.425670186839968, | |
| "grad_norm": 0.09993981323574717, | |
| "learning_rate": 2.669966484501716e-06, | |
| "loss": 0.4291, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 4.438667749796913, | |
| "grad_norm": 0.09491135655205166, | |
| "learning_rate": 2.5395503521178143e-06, | |
| "loss": 0.425, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 4.451665312753859, | |
| "grad_norm": 0.10027652165084279, | |
| "learning_rate": 2.4122951685636674e-06, | |
| "loss": 0.4287, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 4.464662875710804, | |
| "grad_norm": 0.09768195784704334, | |
| "learning_rate": 2.2882116717542634e-06, | |
| "loss": 0.4297, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 4.477660438667749, | |
| "grad_norm": 0.09320352715058745, | |
| "learning_rate": 2.1673103319746146e-06, | |
| "loss": 0.422, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 4.490658001624696, | |
| "grad_norm": 0.09297954183991879, | |
| "learning_rate": 2.049601350996233e-06, | |
| "loss": 0.4216, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 4.503655564581641, | |
| "grad_norm": 0.09580363639852763, | |
| "learning_rate": 1.93509466121633e-06, | |
| "loss": 0.4221, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 4.516653127538587, | |
| "grad_norm": 0.09572555821764618, | |
| "learning_rate": 1.8237999248197002e-06, | |
| "loss": 0.4261, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 4.529650690495532, | |
| "grad_norm": 0.09729341717065641, | |
| "learning_rate": 1.7157265329634354e-06, | |
| "loss": 0.422, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 4.5426482534524775, | |
| "grad_norm": 0.09262430982141642, | |
| "learning_rate": 1.6108836049844434e-06, | |
| "loss": 0.4275, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 4.555645816409423, | |
| "grad_norm": 0.09643151222343373, | |
| "learning_rate": 1.5092799876299835e-06, | |
| "loss": 0.4318, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 4.568643379366369, | |
| "grad_norm": 0.09552392662018869, | |
| "learning_rate": 1.4109242543111834e-06, | |
| "loss": 0.4262, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 4.581640942323315, | |
| "grad_norm": 0.0934634073485968, | |
| "learning_rate": 1.3158247043795735e-06, | |
| "loss": 0.4191, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 4.59463850528026, | |
| "grad_norm": 0.09051362955312056, | |
| "learning_rate": 1.2239893624267852e-06, | |
| "loss": 0.4249, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 4.6076360682372055, | |
| "grad_norm": 0.10311548227178086, | |
| "learning_rate": 1.1354259776074472e-06, | |
| "loss": 0.421, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 4.620633631194151, | |
| "grad_norm": 0.09034380750409249, | |
| "learning_rate": 1.050142022985292e-06, | |
| "loss": 0.4234, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 4.633631194151096, | |
| "grad_norm": 0.0912218320639531, | |
| "learning_rate": 9.681446949025752e-07, | |
| "loss": 0.4231, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 4.646628757108042, | |
| "grad_norm": 0.0907014935264679, | |
| "learning_rate": 8.89440912372832e-07, | |
| "loss": 0.4162, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 4.659626320064988, | |
| "grad_norm": 0.08963535898951595, | |
| "learning_rate": 8.140373164970428e-07, | |
| "loss": 0.4286, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 4.6726238830219335, | |
| "grad_norm": 0.09182311609539749, | |
| "learning_rate": 7.419402699032852e-07, | |
| "loss": 0.4213, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 4.685621445978879, | |
| "grad_norm": 0.08836898405911321, | |
| "learning_rate": 6.731558562097995e-07, | |
| "loss": 0.4247, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 4.698619008935824, | |
| "grad_norm": 0.09028610838105593, | |
| "learning_rate": 6.076898795116792e-07, | |
| "loss": 0.4253, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 4.71161657189277, | |
| "grad_norm": 0.09100823747148952, | |
| "learning_rate": 5.455478638911071e-07, | |
| "loss": 0.4251, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 4.724614134849716, | |
| "grad_norm": 0.09029025365560213, | |
| "learning_rate": 4.867350529512261e-07, | |
| "loss": 0.4187, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 4.737611697806662, | |
| "grad_norm": 0.08743205850314692, | |
| "learning_rate": 4.3125640937368373e-07, | |
| "loss": 0.4254, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 4.750609260763607, | |
| "grad_norm": 0.0922555367394928, | |
| "learning_rate": 3.791166144998704e-07, | |
| "loss": 0.423, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 4.763606823720552, | |
| "grad_norm": 0.08845599991505329, | |
| "learning_rate": 3.3032006793590977e-07, | |
| "loss": 0.4224, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 4.776604386677498, | |
| "grad_norm": 0.0915393101514765, | |
| "learning_rate": 2.848708871814054e-07, | |
| "loss": 0.4255, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 4.789601949634443, | |
| "grad_norm": 0.0888246620842322, | |
| "learning_rate": 2.4277290728202063e-07, | |
| "loss": 0.4202, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 4.802599512591389, | |
| "grad_norm": 0.0912092493664706, | |
| "learning_rate": 2.040296805058528e-07, | |
| "loss": 0.4316, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 4.815597075548335, | |
| "grad_norm": 0.09110426289157889, | |
| "learning_rate": 1.6864447604370004e-07, | |
| "loss": 0.43, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 4.8285946385052805, | |
| "grad_norm": 0.08633872492794921, | |
| "learning_rate": 1.3662027973320614e-07, | |
| "loss": 0.4311, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 4.841592201462226, | |
| "grad_norm": 0.08888780145070413, | |
| "learning_rate": 1.0795979380690657e-07, | |
| "loss": 0.4182, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 4.854589764419171, | |
| "grad_norm": 0.08984098123092447, | |
| "learning_rate": 8.266543666421544e-08, | |
| "loss": 0.4172, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 4.867587327376117, | |
| "grad_norm": 0.08852973851348196, | |
| "learning_rate": 6.073934266735303e-08, | |
| "loss": 0.4234, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 4.880584890333062, | |
| "grad_norm": 0.08776688037991925, | |
| "learning_rate": 4.218336196125439e-08, | |
| "loss": 0.4268, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 4.8935824532900085, | |
| "grad_norm": 0.08708562125303575, | |
| "learning_rate": 2.699906031745414e-08, | |
| "loss": 0.4234, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 4.906580016246954, | |
| "grad_norm": 0.08866044810066966, | |
| "learning_rate": 1.5187719001943378e-08, | |
| "loss": 0.4217, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 4.919577579203899, | |
| "grad_norm": 0.08852375482148439, | |
| "learning_rate": 6.750334667091629e-09, | |
| "loss": 0.4235, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 4.932575142160845, | |
| "grad_norm": 0.08832697038072729, | |
| "learning_rate": 1.6876192675052695e-09, | |
| "loss": 0.4274, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 4.94557270511779, | |
| "grad_norm": 0.08596221358605577, | |
| "learning_rate": 0.0, | |
| "loss": 0.4228, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 4.94557270511779, | |
| "step": 380, | |
| "total_flos": 9.778530867524665e+18, | |
| "train_loss": 0.5321214740997866, | |
| "train_runtime": 38508.3509, | |
| "train_samples_per_second": 5.112, | |
| "train_steps_per_second": 0.01 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 380, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 9.778530867524665e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |