{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 4954,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002018927444794953,
      "grad_norm": 2.3389563305266345,
      "learning_rate": 1.8145161290322583e-07,
      "loss": 0.7881,
      "step": 10
    },
    {
      "epoch": 0.004037854889589906,
      "grad_norm": 2.024167058240825,
      "learning_rate": 3.830645161290323e-07,
      "loss": 0.8121,
      "step": 20
    },
    {
      "epoch": 0.006056782334384858,
      "grad_norm": 1.4351269819866417,
      "learning_rate": 5.846774193548388e-07,
      "loss": 0.7797,
      "step": 30
    },
    {
      "epoch": 0.008075709779179811,
      "grad_norm": 0.9310252821124614,
      "learning_rate": 7.862903225806453e-07,
      "loss": 0.7318,
      "step": 40
    },
    {
      "epoch": 0.010094637223974764,
      "grad_norm": 0.9557517206875442,
      "learning_rate": 9.879032258064516e-07,
      "loss": 0.7018,
      "step": 50
    },
    {
      "epoch": 0.012113564668769715,
      "grad_norm": 0.5829217778568054,
      "learning_rate": 1.1895161290322582e-06,
      "loss": 0.6708,
      "step": 60
    },
    {
      "epoch": 0.014132492113564668,
      "grad_norm": 0.5112881851342524,
      "learning_rate": 1.3911290322580644e-06,
      "loss": 0.6417,
      "step": 70
    },
    {
      "epoch": 0.016151419558359623,
      "grad_norm": 0.5140187941233466,
      "learning_rate": 1.5927419354838712e-06,
      "loss": 0.6227,
      "step": 80
    },
    {
      "epoch": 0.018170347003154574,
      "grad_norm": 0.4872446144280347,
      "learning_rate": 1.7943548387096777e-06,
      "loss": 0.615,
      "step": 90
    },
    {
      "epoch": 0.02018927444794953,
      "grad_norm": 0.5191969395080858,
      "learning_rate": 1.9959677419354837e-06,
      "loss": 0.6105,
      "step": 100
    },
    {
      "epoch": 0.02220820189274448,
      "grad_norm": 0.451802382492337,
      "learning_rate": 2.1975806451612907e-06,
      "loss": 0.5911,
      "step": 110
    },
    {
      "epoch": 0.02422712933753943,
      "grad_norm": 0.42163928465445194,
      "learning_rate": 2.399193548387097e-06,
      "loss": 0.5921,
      "step": 120
    },
    {
      "epoch": 0.026246056782334385,
      "grad_norm": 0.4552027864136731,
      "learning_rate": 2.6008064516129032e-06,
      "loss": 0.5818,
      "step": 130
    },
    {
      "epoch": 0.028264984227129336,
      "grad_norm": 0.44985963500335385,
      "learning_rate": 2.8024193548387097e-06,
      "loss": 0.5876,
      "step": 140
    },
    {
      "epoch": 0.03028391167192429,
      "grad_norm": 0.5111630527751778,
      "learning_rate": 3.0040322580645167e-06,
      "loss": 0.5728,
      "step": 150
    },
    {
      "epoch": 0.032302839116719245,
      "grad_norm": 0.44240607593167414,
      "learning_rate": 3.2056451612903228e-06,
      "loss": 0.5643,
      "step": 160
    },
    {
      "epoch": 0.03432176656151419,
      "grad_norm": 0.4865981695157576,
      "learning_rate": 3.4072580645161293e-06,
      "loss": 0.5617,
      "step": 170
    },
    {
      "epoch": 0.03634069400630915,
      "grad_norm": 0.6085288533687223,
      "learning_rate": 3.6088709677419358e-06,
      "loss": 0.5501,
      "step": 180
    },
    {
      "epoch": 0.0383596214511041,
      "grad_norm": 0.47289717909858964,
      "learning_rate": 3.8104838709677423e-06,
      "loss": 0.559,
      "step": 190
    },
    {
      "epoch": 0.04037854889589906,
      "grad_norm": 0.5389683685989423,
      "learning_rate": 4.012096774193548e-06,
      "loss": 0.5601,
      "step": 200
    },
    {
      "epoch": 0.042397476340694004,
      "grad_norm": 0.44905068010794785,
      "learning_rate": 4.213709677419355e-06,
      "loss": 0.546,
      "step": 210
    },
    {
      "epoch": 0.04441640378548896,
      "grad_norm": 0.4738265544351665,
      "learning_rate": 4.415322580645161e-06,
      "loss": 0.5527,
      "step": 220
    },
    {
      "epoch": 0.04643533123028391,
      "grad_norm": 0.48116551901846,
      "learning_rate": 4.616935483870968e-06,
      "loss": 0.5428,
      "step": 230
    },
    {
      "epoch": 0.04845425867507886,
      "grad_norm": 0.4514773717137442,
      "learning_rate": 4.818548387096774e-06,
      "loss": 0.5338,
      "step": 240
    },
    {
      "epoch": 0.050473186119873815,
      "grad_norm": 0.5092040454559039,
      "learning_rate": 5.020161290322581e-06,
      "loss": 0.5546,
      "step": 250
    },
    {
      "epoch": 0.05249211356466877,
      "grad_norm": 0.45858657596184155,
      "learning_rate": 5.221774193548388e-06,
      "loss": 0.5408,
      "step": 260
    },
    {
      "epoch": 0.054511041009463725,
      "grad_norm": 0.47766211942842146,
      "learning_rate": 5.423387096774194e-06,
      "loss": 0.5427,
      "step": 270
    },
    {
      "epoch": 0.05652996845425867,
      "grad_norm": 0.5108229913280311,
      "learning_rate": 5.625e-06,
      "loss": 0.5357,
      "step": 280
    },
    {
      "epoch": 0.05854889589905363,
      "grad_norm": 0.5232937676969737,
      "learning_rate": 5.8266129032258064e-06,
      "loss": 0.5316,
      "step": 290
    },
    {
      "epoch": 0.06056782334384858,
      "grad_norm": 0.5241114539289579,
      "learning_rate": 6.028225806451613e-06,
      "loss": 0.5444,
      "step": 300
    },
    {
      "epoch": 0.06258675078864354,
      "grad_norm": 0.9566252867340465,
      "learning_rate": 6.2298387096774194e-06,
      "loss": 0.5256,
      "step": 310
    },
    {
      "epoch": 0.06460567823343849,
      "grad_norm": 0.4700069264487592,
      "learning_rate": 6.431451612903226e-06,
      "loss": 0.535,
      "step": 320
    },
    {
      "epoch": 0.06662460567823345,
      "grad_norm": 0.46343674018411934,
      "learning_rate": 6.633064516129033e-06,
      "loss": 0.5281,
      "step": 330
    },
    {
      "epoch": 0.06864353312302839,
      "grad_norm": 0.46610206235853135,
      "learning_rate": 6.834677419354839e-06,
      "loss": 0.5324,
      "step": 340
    },
    {
      "epoch": 0.07066246056782334,
      "grad_norm": 0.5217091961421014,
      "learning_rate": 7.0362903225806454e-06,
      "loss": 0.5321,
      "step": 350
    },
    {
      "epoch": 0.0726813880126183,
      "grad_norm": 0.4532872443875366,
      "learning_rate": 7.2379032258064515e-06,
      "loss": 0.5235,
      "step": 360
    },
    {
      "epoch": 0.07470031545741325,
      "grad_norm": 0.5481191485070994,
      "learning_rate": 7.4395161290322585e-06,
      "loss": 0.5203,
      "step": 370
    },
    {
      "epoch": 0.0767192429022082,
      "grad_norm": 0.4817884817896886,
      "learning_rate": 7.641129032258065e-06,
      "loss": 0.5289,
      "step": 380
    },
    {
      "epoch": 0.07873817034700316,
      "grad_norm": 0.463062633118143,
      "learning_rate": 7.842741935483872e-06,
      "loss": 0.5199,
      "step": 390
    },
    {
      "epoch": 0.08075709779179811,
      "grad_norm": 0.5267129255916911,
      "learning_rate": 8.044354838709678e-06,
      "loss": 0.5239,
      "step": 400
    },
    {
      "epoch": 0.08277602523659305,
      "grad_norm": 0.4703183395271576,
      "learning_rate": 8.245967741935484e-06,
      "loss": 0.5217,
      "step": 410
    },
    {
      "epoch": 0.08479495268138801,
      "grad_norm": 0.4559407675951848,
      "learning_rate": 8.447580645161291e-06,
      "loss": 0.5187,
      "step": 420
    },
    {
      "epoch": 0.08681388012618296,
      "grad_norm": 0.4638358181971901,
      "learning_rate": 8.649193548387097e-06,
      "loss": 0.5194,
      "step": 430
    },
    {
      "epoch": 0.08883280757097792,
      "grad_norm": 0.47768712570505284,
      "learning_rate": 8.850806451612905e-06,
      "loss": 0.513,
      "step": 440
    },
    {
      "epoch": 0.09085173501577287,
      "grad_norm": 0.5140723294282747,
      "learning_rate": 9.05241935483871e-06,
      "loss": 0.5161,
      "step": 450
    },
    {
      "epoch": 0.09287066246056783,
      "grad_norm": 0.4719590441343577,
      "learning_rate": 9.254032258064517e-06,
      "loss": 0.5033,
      "step": 460
    },
    {
      "epoch": 0.09488958990536278,
      "grad_norm": 0.5031363320062582,
      "learning_rate": 9.455645161290323e-06,
      "loss": 0.5145,
      "step": 470
    },
    {
      "epoch": 0.09690851735015772,
      "grad_norm": 0.5027908604602157,
      "learning_rate": 9.65725806451613e-06,
      "loss": 0.5178,
      "step": 480
    },
    {
      "epoch": 0.09892744479495268,
      "grad_norm": 0.5046628095943158,
      "learning_rate": 9.858870967741936e-06,
      "loss": 0.5069,
      "step": 490
    },
    {
      "epoch": 0.10094637223974763,
      "grad_norm": 0.5522780839548007,
      "learning_rate": 9.99998882617222e-06,
      "loss": 0.5169,
      "step": 500
    },
    {
      "epoch": 0.10296529968454259,
      "grad_norm": 0.48260445699138166,
      "learning_rate": 9.999790181734308e-06,
      "loss": 0.5067,
      "step": 510
    },
    {
      "epoch": 0.10498422712933754,
      "grad_norm": 0.4875096921972213,
      "learning_rate": 9.999343241367322e-06,
      "loss": 0.5152,
      "step": 520
    },
    {
      "epoch": 0.1070031545741325,
      "grad_norm": 0.5080882476390746,
      "learning_rate": 9.998648027266884e-06,
      "loss": 0.5131,
      "step": 530
    },
    {
      "epoch": 0.10902208201892745,
      "grad_norm": 0.5107719615388011,
      "learning_rate": 9.997704573958209e-06,
      "loss": 0.5053,
      "step": 540
    },
    {
      "epoch": 0.1110410094637224,
      "grad_norm": 0.5361602322829081,
      "learning_rate": 9.996512928294389e-06,
      "loss": 0.5083,
      "step": 550
    },
    {
      "epoch": 0.11305993690851734,
      "grad_norm": 0.4851941991561818,
      "learning_rate": 9.995073149454057e-06,
      "loss": 0.4997,
      "step": 560
    },
    {
      "epoch": 0.1150788643533123,
      "grad_norm": 0.49258962377904003,
      "learning_rate": 9.99338530893846e-06,
      "loss": 0.516,
      "step": 570
    },
    {
      "epoch": 0.11709779179810725,
      "grad_norm": 0.4835880405802275,
      "learning_rate": 9.991449490567901e-06,
      "loss": 0.5073,
      "step": 580
    },
    {
      "epoch": 0.11911671924290221,
      "grad_norm": 0.507245673357914,
      "learning_rate": 9.989265790477581e-06,
      "loss": 0.5014,
      "step": 590
    },
    {
      "epoch": 0.12113564668769716,
      "grad_norm": 0.5146575464998254,
      "learning_rate": 9.986834317112817e-06,
      "loss": 0.5177,
      "step": 600
    },
    {
      "epoch": 0.12315457413249212,
      "grad_norm": 0.4662328996304102,
      "learning_rate": 9.984155191223663e-06,
      "loss": 0.5054,
      "step": 610
    },
    {
      "epoch": 0.12517350157728707,
      "grad_norm": 0.4571893765964666,
      "learning_rate": 9.981228545858913e-06,
      "loss": 0.5086,
      "step": 620
    },
    {
      "epoch": 0.12719242902208203,
      "grad_norm": 0.4697827268486345,
      "learning_rate": 9.978054526359493e-06,
      "loss": 0.5029,
      "step": 630
    },
    {
      "epoch": 0.12921135646687698,
      "grad_norm": 0.5031062337107917,
      "learning_rate": 9.97463329035124e-06,
      "loss": 0.5021,
      "step": 640
    },
    {
      "epoch": 0.13123028391167194,
      "grad_norm": 0.571029993720401,
      "learning_rate": 9.970965007737081e-06,
      "loss": 0.5077,
      "step": 650
    },
    {
      "epoch": 0.1332492113564669,
      "grad_norm": 0.45753840087765046,
      "learning_rate": 9.967049860688587e-06,
      "loss": 0.4961,
      "step": 660
    },
    {
      "epoch": 0.13526813880126182,
      "grad_norm": 0.5189371422618692,
      "learning_rate": 9.962888043636931e-06,
      "loss": 0.5036,
      "step": 670
    },
    {
      "epoch": 0.13728706624605677,
      "grad_norm": 0.47066047930695143,
      "learning_rate": 9.958479763263234e-06,
      "loss": 0.4972,
      "step": 680
    },
    {
      "epoch": 0.13930599369085173,
      "grad_norm": 0.49380951082560154,
      "learning_rate": 9.953825238488296e-06,
      "loss": 0.4955,
      "step": 690
    },
    {
      "epoch": 0.14132492113564668,
      "grad_norm": 0.49487805704511495,
      "learning_rate": 9.948924700461727e-06,
      "loss": 0.5013,
      "step": 700
    },
    {
      "epoch": 0.14334384858044164,
      "grad_norm": 0.4768179474157311,
      "learning_rate": 9.94377839255047e-06,
      "loss": 0.4945,
      "step": 710
    },
    {
      "epoch": 0.1453627760252366,
      "grad_norm": 0.5086022999656893,
      "learning_rate": 9.938386570326707e-06,
      "loss": 0.5033,
      "step": 720
    },
    {
      "epoch": 0.14738170347003154,
      "grad_norm": 0.4947936431118982,
      "learning_rate": 9.93274950155518e-06,
      "loss": 0.5013,
      "step": 730
    },
    {
      "epoch": 0.1494006309148265,
      "grad_norm": 0.49370518434763616,
      "learning_rate": 9.926867466179883e-06,
      "loss": 0.4923,
      "step": 740
    },
    {
      "epoch": 0.15141955835962145,
      "grad_norm": 0.49665569470371385,
      "learning_rate": 9.920740756310156e-06,
      "loss": 0.5038,
      "step": 750
    },
    {
      "epoch": 0.1534384858044164,
      "grad_norm": 0.46813454785234254,
      "learning_rate": 9.914369676206195e-06,
      "loss": 0.4875,
      "step": 760
    },
    {
      "epoch": 0.15545741324921136,
      "grad_norm": 0.49814972796278695,
      "learning_rate": 9.907754542263926e-06,
      "loss": 0.4939,
      "step": 770
    },
    {
      "epoch": 0.15747634069400632,
      "grad_norm": 0.5218798292518447,
      "learning_rate": 9.900895682999301e-06,
      "loss": 0.501,
      "step": 780
    },
    {
      "epoch": 0.15949526813880127,
      "grad_norm": 0.5913103279447773,
      "learning_rate": 9.893793439031982e-06,
      "loss": 0.4927,
      "step": 790
    },
    {
      "epoch": 0.16151419558359623,
      "grad_norm": 0.5125446596453367,
      "learning_rate": 9.88644816306842e-06,
      "loss": 0.4971,
      "step": 800
    },
    {
      "epoch": 0.16353312302839118,
      "grad_norm": 0.4483762668624327,
      "learning_rate": 9.878860219884347e-06,
      "loss": 0.4981,
      "step": 810
    },
    {
      "epoch": 0.1655520504731861,
      "grad_norm": 0.4787860866647759,
      "learning_rate": 9.871029986306658e-06,
      "loss": 0.4964,
      "step": 820
    },
    {
      "epoch": 0.16757097791798106,
      "grad_norm": 0.5030394247546482,
      "learning_rate": 9.862957851194694e-06,
      "loss": 0.4891,
      "step": 830
    },
    {
      "epoch": 0.16958990536277602,
      "grad_norm": 0.5239985323894606,
      "learning_rate": 9.854644215420933e-06,
      "loss": 0.501,
      "step": 840
    },
    {
      "epoch": 0.17160883280757097,
      "grad_norm": 0.4934736601735223,
      "learning_rate": 9.846089491851089e-06,
      "loss": 0.5012,
      "step": 850
    },
    {
      "epoch": 0.17362776025236593,
      "grad_norm": 0.5649851363661309,
      "learning_rate": 9.837294105323592e-06,
      "loss": 0.501,
      "step": 860
    },
    {
      "epoch": 0.17564668769716088,
      "grad_norm": 0.5032183289408247,
      "learning_rate": 9.828258492628506e-06,
      "loss": 0.5019,
      "step": 870
    },
    {
      "epoch": 0.17766561514195583,
      "grad_norm": 0.5408394773542722,
      "learning_rate": 9.818983102485832e-06,
      "loss": 0.4914,
      "step": 880
    },
    {
      "epoch": 0.1796845425867508,
      "grad_norm": 0.47080262308715565,
      "learning_rate": 9.809468395523225e-06,
      "loss": 0.4874,
      "step": 890
    },
    {
      "epoch": 0.18170347003154574,
      "grad_norm": 0.5707749159791521,
      "learning_rate": 9.799714844253109e-06,
      "loss": 0.4995,
      "step": 900
    },
    {
      "epoch": 0.1837223974763407,
      "grad_norm": 0.48122327046673424,
      "learning_rate": 9.789722933049227e-06,
      "loss": 0.4892,
      "step": 910
    },
    {
      "epoch": 0.18574132492113565,
      "grad_norm": 0.45082444705324104,
      "learning_rate": 9.779493158122578e-06,
      "loss": 0.4909,
      "step": 920
    },
    {
      "epoch": 0.1877602523659306,
      "grad_norm": 0.5068314998947344,
      "learning_rate": 9.769026027496773e-06,
      "loss": 0.4881,
      "step": 930
    },
    {
      "epoch": 0.18977917981072556,
      "grad_norm": 0.44089908003969003,
      "learning_rate": 9.758322060982813e-06,
      "loss": 0.4923,
      "step": 940
    },
    {
      "epoch": 0.19179810725552052,
      "grad_norm": 0.4786006068183123,
      "learning_rate": 9.747381790153262e-06,
      "loss": 0.4912,
      "step": 950
    },
    {
      "epoch": 0.19381703470031544,
      "grad_norm": 0.4531421936277408,
      "learning_rate": 9.736205758315864e-06,
      "loss": 0.4744,
      "step": 960
    },
    {
      "epoch": 0.1958359621451104,
      "grad_norm": 0.4492650045467347,
      "learning_rate": 9.724794520486551e-06,
      "loss": 0.485,
      "step": 970
    },
    {
      "epoch": 0.19785488958990535,
      "grad_norm": 0.4616235423638888,
      "learning_rate": 9.713148643361884e-06,
      "loss": 0.488,
      "step": 980
    },
    {
      "epoch": 0.1998738170347003,
      "grad_norm": 0.5078516721505932,
      "learning_rate": 9.701268705290907e-06,
      "loss": 0.4886,
      "step": 990
    },
    {
      "epoch": 0.20189274447949526,
      "grad_norm": 0.5292871585531458,
      "learning_rate": 9.689155296246431e-06,
      "loss": 0.4897,
      "step": 1000
    },
    {
      "epoch": 0.20391167192429022,
      "grad_norm": 0.4614297555636878,
      "learning_rate": 9.67680901779573e-06,
      "loss": 0.4809,
      "step": 1010
    },
    {
      "epoch": 0.20593059936908517,
      "grad_norm": 0.4853770956696164,
      "learning_rate": 9.664230483070669e-06,
      "loss": 0.4848,
      "step": 1020
    },
    {
      "epoch": 0.20794952681388013,
      "grad_norm": 0.45954875393494693,
      "learning_rate": 9.651420316737254e-06,
      "loss": 0.4817,
      "step": 1030
    },
    {
      "epoch": 0.20996845425867508,
      "grad_norm": 0.4539790244117171,
      "learning_rate": 9.638379154964615e-06,
      "loss": 0.4927,
      "step": 1040
    },
    {
      "epoch": 0.21198738170347003,
      "grad_norm": 0.4999117191134038,
      "learning_rate": 9.625107645393398e-06,
      "loss": 0.4906,
      "step": 1050
    },
    {
      "epoch": 0.214006309148265,
      "grad_norm": 0.5315614747354604,
      "learning_rate": 9.611606447103625e-06,
      "loss": 0.4701,
      "step": 1060
    },
    {
      "epoch": 0.21602523659305994,
      "grad_norm": 0.4465543761029164,
      "learning_rate": 9.597876230581946e-06,
      "loss": 0.4844,
      "step": 1070
    },
    {
      "epoch": 0.2180441640378549,
      "grad_norm": 0.5240713468337838,
      "learning_rate": 9.583917677688348e-06,
      "loss": 0.4802,
      "step": 1080
    },
    {
      "epoch": 0.22006309148264985,
      "grad_norm": 0.4797396408083518,
      "learning_rate": 9.56973148162229e-06,
      "loss": 0.4822,
      "step": 1090
    },
    {
      "epoch": 0.2220820189274448,
      "grad_norm": 0.5124904557581254,
      "learning_rate": 9.555318346888287e-06,
      "loss": 0.4887,
      "step": 1100
    },
    {
      "epoch": 0.22410094637223973,
      "grad_norm": 0.448720333545146,
      "learning_rate": 9.540678989260907e-06,
      "loss": 0.4804,
      "step": 1110
    },
    {
      "epoch": 0.2261198738170347,
      "grad_norm": 0.4479157257263002,
      "learning_rate": 9.525814135749238e-06,
      "loss": 0.4776,
      "step": 1120
    },
    {
      "epoch": 0.22813880126182964,
      "grad_norm": 0.4630993156519356,
      "learning_rate": 9.510724524560783e-06,
      "loss": 0.4753,
      "step": 1130
    },
    {
      "epoch": 0.2301577287066246,
      "grad_norm": 0.46574160333254905,
      "learning_rate": 9.495410905064795e-06,
      "loss": 0.4802,
      "step": 1140
    },
    {
      "epoch": 0.23217665615141955,
      "grad_norm": 0.532351729710861,
      "learning_rate": 9.479874037755061e-06,
      "loss": 0.4826,
      "step": 1150
    },
    {
      "epoch": 0.2341955835962145,
      "grad_norm": 0.4629220502691385,
      "learning_rate": 9.46411469421214e-06,
      "loss": 0.4802,
      "step": 1160
    },
    {
      "epoch": 0.23621451104100946,
      "grad_norm": 0.459458980682449,
      "learning_rate": 9.448133657065045e-06,
      "loss": 0.4863,
      "step": 1170
    },
    {
      "epoch": 0.23823343848580442,
      "grad_norm": 0.4652002127480942,
      "learning_rate": 9.431931719952372e-06,
      "loss": 0.4785,
      "step": 1180
    },
    {
      "epoch": 0.24025236593059937,
      "grad_norm": 0.466540922054408,
      "learning_rate": 9.415509687482892e-06,
      "loss": 0.4719,
      "step": 1190
    },
    {
      "epoch": 0.24227129337539433,
      "grad_norm": 0.49341259273484933,
      "learning_rate": 9.398868375195587e-06,
      "loss": 0.4926,
      "step": 1200
    },
    {
      "epoch": 0.24429022082018928,
      "grad_norm": 0.4728385366149605,
      "learning_rate": 9.382008609519159e-06,
      "loss": 0.4741,
      "step": 1210
    },
    {
      "epoch": 0.24630914826498423,
      "grad_norm": 0.5027959432042377,
      "learning_rate": 9.364931227730978e-06,
      "loss": 0.4826,
      "step": 1220
    },
    {
      "epoch": 0.2483280757097792,
      "grad_norm": 0.4883346568815589,
      "learning_rate": 9.34763707791551e-06,
      "loss": 0.4837,
      "step": 1230
    },
    {
      "epoch": 0.25034700315457414,
      "grad_norm": 0.4689919758806841,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.4834,
      "step": 1240
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 0.5054719137784303,
      "learning_rate": 9.312401920322793e-06,
      "loss": 0.4858,
      "step": 1250
    },
    {
      "epoch": 0.25438485804416405,
      "grad_norm": 0.4829012063372003,
      "learning_rate": 9.294462662368211e-06,
      "loss": 0.4757,
      "step": 1260
    },
    {
      "epoch": 0.256403785488959,
      "grad_norm": 0.4627206332424085,
      "learning_rate": 9.276310135944777e-06,
      "loss": 0.4783,
      "step": 1270
    },
    {
      "epoch": 0.25842271293375396,
      "grad_norm": 0.4497460790448332,
      "learning_rate": 9.257945242530002e-06,
      "loss": 0.4785,
      "step": 1280
    },
    {
      "epoch": 0.2604416403785489,
      "grad_norm": 0.44985051561838785,
      "learning_rate": 9.239368894147812e-06,
      "loss": 0.4786,
      "step": 1290
    },
    {
      "epoch": 0.26246056782334387,
      "grad_norm": 0.48240095905903635,
      "learning_rate": 9.220582013323258e-06,
      "loss": 0.4807,
      "step": 1300
    },
    {
      "epoch": 0.2644794952681388,
      "grad_norm": 0.48132105969556493,
      "learning_rate": 9.201585533036696e-06,
      "loss": 0.4706,
      "step": 1310
    },
    {
      "epoch": 0.2664984227129338,
      "grad_norm": 0.46675857398369297,
      "learning_rate": 9.18238039667746e-06,
      "loss": 0.4879,
      "step": 1320
    },
    {
      "epoch": 0.2685173501577287,
      "grad_norm": 0.4665634722250575,
      "learning_rate": 9.162967557997003e-06,
      "loss": 0.4799,
      "step": 1330
    },
    {
      "epoch": 0.27053627760252363,
      "grad_norm": 0.4432315628923224,
      "learning_rate": 9.14334798106155e-06,
      "loss": 0.4757,
      "step": 1340
    },
    {
      "epoch": 0.2725552050473186,
      "grad_norm": 0.5000831797296084,
      "learning_rate": 9.123522640204198e-06,
      "loss": 0.4805,
      "step": 1350
    },
    {
      "epoch": 0.27457413249211354,
      "grad_norm": 0.4441933490286196,
      "learning_rate": 9.103492519976555e-06,
      "loss": 0.4769,
      "step": 1360
    },
    {
      "epoch": 0.2765930599369085,
      "grad_norm": 0.4805005293661477,
      "learning_rate": 9.083258615099819e-06,
      "loss": 0.4777,
      "step": 1370
    },
    {
      "epoch": 0.27861198738170345,
      "grad_norm": 0.4445460559749252,
      "learning_rate": 9.062821930415404e-06,
      "loss": 0.4728,
      "step": 1380
    },
    {
      "epoch": 0.28063091482649843,
      "grad_norm": 0.4333540273890526,
      "learning_rate": 9.042183480835018e-06,
      "loss": 0.4705,
      "step": 1390
    },
    {
      "epoch": 0.28264984227129336,
      "grad_norm": 0.48259999792627817,
      "learning_rate": 9.021344291290275e-06,
      "loss": 0.4782,
      "step": 1400
    },
    {
      "epoch": 0.28466876971608834,
      "grad_norm": 0.439829603884435,
      "learning_rate": 9.000305396681788e-06,
      "loss": 0.4705,
      "step": 1410
    },
    {
      "epoch": 0.28668769716088327,
      "grad_norm": 0.4776753948778827,
      "learning_rate": 8.97906784182778e-06,
      "loss": 0.4694,
      "step": 1420
    },
    {
      "epoch": 0.28870662460567825,
      "grad_norm": 0.49821375314109334,
      "learning_rate": 8.95763268141219e-06,
      "loss": 0.4698,
      "step": 1430
    },
    {
      "epoch": 0.2907255520504732,
      "grad_norm": 0.4626475218122892,
      "learning_rate": 8.936000979932304e-06,
      "loss": 0.4766,
      "step": 1440
    },
    {
      "epoch": 0.29274447949526816,
      "grad_norm": 0.4797061046529278,
      "learning_rate": 8.914173811645884e-06,
      "loss": 0.4773,
      "step": 1450
    },
    {
      "epoch": 0.2947634069400631,
      "grad_norm": 0.5067089149884695,
      "learning_rate": 8.892152260517824e-06,
      "loss": 0.4652,
      "step": 1460
    },
    {
      "epoch": 0.29678233438485807,
      "grad_norm": 0.5216910139465747,
      "learning_rate": 8.869937420166312e-06,
      "loss": 0.475,
      "step": 1470
    },
    {
      "epoch": 0.298801261829653,
      "grad_norm": 0.4655884354714463,
      "learning_rate": 8.847530393808534e-06,
      "loss": 0.4811,
      "step": 1480
    },
    {
      "epoch": 0.3008201892744479,
      "grad_norm": 0.47533704121654896,
      "learning_rate": 8.824932294205868e-06,
      "loss": 0.4723,
      "step": 1490
    },
    {
      "epoch": 0.3028391167192429,
      "grad_norm": 0.479585079565894,
      "learning_rate": 8.802144243608633e-06,
      "loss": 0.4728,
      "step": 1500
    },
    {
      "epoch": 0.30485804416403783,
      "grad_norm": 0.4338897481002412,
      "learning_rate": 8.779167373700359e-06,
      "loss": 0.4687,
      "step": 1510
    },
    {
      "epoch": 0.3068769716088328,
      "grad_norm": 0.51284609503485,
      "learning_rate": 8.756002825541583e-06,
      "loss": 0.4795,
      "step": 1520
    },
    {
      "epoch": 0.30889589905362774,
      "grad_norm": 0.5019515223957067,
      "learning_rate": 8.73265174951318e-06,
      "loss": 0.4782,
      "step": 1530
    },
    {
      "epoch": 0.3109148264984227,
      "grad_norm": 0.5764514697725889,
      "learning_rate": 8.709115305259233e-06,
      "loss": 0.4722,
      "step": 1540
    },
    {
      "epoch": 0.31293375394321765,
      "grad_norm": 0.48602580093856246,
      "learning_rate": 8.685394661629453e-06,
      "loss": 0.4764,
      "step": 1550
    },
    {
      "epoch": 0.31495268138801263,
      "grad_norm": 0.43106495742778833,
      "learning_rate": 8.661490996621125e-06,
      "loss": 0.4749,
      "step": 1560
    },
    {
      "epoch": 0.31697160883280756,
      "grad_norm": 0.4879400234119546,
      "learning_rate": 8.637405497320599e-06,
      "loss": 0.4685,
      "step": 1570
    },
    {
      "epoch": 0.31899053627760254,
      "grad_norm": 0.48144632266967335,
      "learning_rate": 8.61313935984436e-06,
      "loss": 0.473,
      "step": 1580
    },
    {
      "epoch": 0.32100946372239747,
      "grad_norm": 0.4824945838927638,
      "learning_rate": 8.588693789279605e-06,
      "loss": 0.467,
      "step": 1590
    },
    {
      "epoch": 0.32302839116719245,
      "grad_norm": 0.48854234491790616,
      "learning_rate": 8.56406999962441e-06,
      "loss": 0.4715,
      "step": 1600
    },
    {
      "epoch": 0.3250473186119874,
      "grad_norm": 0.43356661154496373,
      "learning_rate": 8.539269213727434e-06,
      "loss": 0.4643,
      "step": 1610
    },
    {
      "epoch": 0.32706624605678236,
      "grad_norm": 0.45038524364573934,
      "learning_rate": 8.514292663227196e-06,
      "loss": 0.4753,
      "step": 1620
    },
    {
      "epoch": 0.3290851735015773,
      "grad_norm": 0.5390076730569227,
      "learning_rate": 8.489141588490914e-06,
      "loss": 0.4692,
      "step": 1630
    },
    {
      "epoch": 0.3311041009463722,
      "grad_norm": 0.5166665541948204,
      "learning_rate": 8.46381723855289e-06,
      "loss": 0.4626,
      "step": 1640
    },
    {
      "epoch": 0.3331230283911672,
      "grad_norm": 0.4973742178832253,
      "learning_rate": 8.438320871052501e-06,
      "loss": 0.4722,
      "step": 1650
    },
    {
      "epoch": 0.3351419558359621,
      "grad_norm": 0.4625137579015689,
      "learning_rate": 8.412653752171732e-06,
      "loss": 0.4591,
      "step": 1660
    },
    {
      "epoch": 0.3371608832807571,
      "grad_norm": 0.48424933592035946,
      "learning_rate": 8.386817156572298e-06,
      "loss": 0.48,
      "step": 1670
    },
    {
      "epoch": 0.33917981072555203,
      "grad_norm": 0.4532582849486311,
      "learning_rate": 8.360812367332342e-06,
      "loss": 0.469,
      "step": 1680
    },
    {
      "epoch": 0.341198738170347,
      "grad_norm": 0.4456128521040015,
      "learning_rate": 8.33464067588272e-06,
      "loss": 0.4701,
      "step": 1690
    },
    {
      "epoch": 0.34321766561514194,
      "grad_norm": 0.47975070454468693,
      "learning_rate": 8.30830338194286e-06,
      "loss": 0.4742,
      "step": 1700
    },
    {
      "epoch": 0.3452365930599369,
      "grad_norm": 0.44758135515111036,
      "learning_rate": 8.281801793456223e-06,
      "loss": 0.4563,
      "step": 1710
    },
    {
      "epoch": 0.34725552050473185,
      "grad_norm": 0.539530429343652,
      "learning_rate": 8.255137226525346e-06,
      "loss": 0.4678,
      "step": 1720
    },
    {
      "epoch": 0.34927444794952683,
      "grad_norm": 0.44634229380095736,
      "learning_rate": 8.22831100534648e-06,
      "loss": 0.4688,
      "step": 1730
    },
    {
      "epoch": 0.35129337539432176,
      "grad_norm": 0.41960408937669463,
      "learning_rate": 8.201324462143827e-06,
      "loss": 0.4542,
      "step": 1740
    },
    {
      "epoch": 0.35331230283911674,
      "grad_norm": 0.4791531544406239,
      "learning_rate": 8.174178937103397e-06,
      "loss": 0.4743,
      "step": 1750
    },
    {
      "epoch": 0.35533123028391167,
      "grad_norm": 0.4352818312435405,
      "learning_rate": 8.14687577830643e-06,
      "loss": 0.4604,
      "step": 1760
    },
    {
      "epoch": 0.35735015772870665,
      "grad_norm": 0.4672874879206997,
      "learning_rate": 8.11941634166246e-06,
      "loss": 0.4712,
      "step": 1770
    },
    {
      "epoch": 0.3593690851735016,
      "grad_norm": 0.4637056308756207,
      "learning_rate": 8.09180199084198e-06,
      "loss": 0.4668,
      "step": 1780
    },
    {
      "epoch": 0.3613880126182965,
      "grad_norm": 0.44264459219210456,
      "learning_rate": 8.064034097208712e-06,
      "loss": 0.4596,
      "step": 1790
    },
    {
      "epoch": 0.3634069400630915,
      "grad_norm": 0.49462230789807315,
      "learning_rate": 8.036114039751516e-06,
      "loss": 0.4667,
      "step": 1800
    },
    {
      "epoch": 0.3654258675078864,
      "grad_norm": 0.4634309345247295,
      "learning_rate": 8.0080432050159e-06,
      "loss": 0.4604,
      "step": 1810
    },
    {
      "epoch": 0.3674447949526814,
      "grad_norm": 0.4999816402185204,
      "learning_rate": 7.979822987035157e-06,
      "loss": 0.4644,
      "step": 1820
    },
    {
      "epoch": 0.3694637223974763,
      "grad_norm": 0.47642327436037435,
      "learning_rate": 7.951454787261148e-06,
      "loss": 0.4677,
      "step": 1830
    },
    {
      "epoch": 0.3714826498422713,
      "grad_norm": 0.4953986824691425,
      "learning_rate": 7.922940014494693e-06,
      "loss": 0.4593,
      "step": 1840
    },
    {
      "epoch": 0.37350157728706623,
      "grad_norm": 0.4817430868786167,
      "learning_rate": 7.894280084815618e-06,
      "loss": 0.4689,
      "step": 1850
    },
    {
      "epoch": 0.3755205047318612,
      "grad_norm": 0.4356746961237441,
      "learning_rate": 7.86547642151242e-06,
      "loss": 0.4611,
      "step": 1860
    },
    {
      "epoch": 0.37753943217665614,
      "grad_norm": 0.4905696785040258,
      "learning_rate": 7.836530455011594e-06,
      "loss": 0.4647,
      "step": 1870
    },
    {
      "epoch": 0.3795583596214511,
      "grad_norm": 0.4730705035791906,
      "learning_rate": 7.807443622806592e-06,
      "loss": 0.4643,
      "step": 1880
    },
    {
      "epoch": 0.38157728706624605,
      "grad_norm": 0.4626721574097504,
      "learning_rate": 7.778217369386434e-06,
      "loss": 0.4621,
      "step": 1890
    },
    {
      "epoch": 0.38359621451104103,
      "grad_norm": 0.5095748907639542,
      "learning_rate": 7.748853146163978e-06,
      "loss": 0.4702,
      "step": 1900
    },
    {
      "epoch": 0.38561514195583596,
      "grad_norm": 0.45666908318697785,
      "learning_rate": 7.719352411403836e-06,
      "loss": 0.4584,
      "step": 1910
    },
    {
      "epoch": 0.3876340694006309,
      "grad_norm": 0.5831960484166556,
      "learning_rate": 7.68971663014995e-06,
      "loss": 0.4581,
      "step": 1920
    },
    {
      "epoch": 0.38965299684542587,
      "grad_norm": 0.43083136772614855,
      "learning_rate": 7.659947274152857e-06,
      "loss": 0.4595,
      "step": 1930
    },
    {
      "epoch": 0.3916719242902208,
      "grad_norm": 0.44527597409724967,
      "learning_rate": 7.630045821796567e-06,
      "loss": 0.4603,
      "step": 1940
    },
    {
      "epoch": 0.3936908517350158,
      "grad_norm": 0.5085813030698595,
      "learning_rate": 7.60001375802518e-06,
      "loss": 0.4726,
      "step": 1950
    },
    {
      "epoch": 0.3957097791798107,
      "grad_norm": 0.5516780680340057,
      "learning_rate": 7.5698525742691145e-06,
      "loss": 0.4553,
      "step": 1960
    },
    {
      "epoch": 0.3977287066246057,
      "grad_norm": 0.4473799858576117,
      "learning_rate": 7.5395637683710534e-06,
      "loss": 0.4652,
      "step": 1970
    },
    {
      "epoch": 0.3997476340694006,
      "grad_norm": 0.4594068744960154,
      "learning_rate": 7.509148844511556e-06,
      "loss": 0.4569,
      "step": 1980
    },
    {
      "epoch": 0.4017665615141956,
      "grad_norm": 0.4195303708920141,
      "learning_rate": 7.478609313134367e-06,
      "loss": 0.4608,
      "step": 1990
    },
    {
      "epoch": 0.4037854889589905,
      "grad_norm": 0.5224611450690858,
      "learning_rate": 7.447946690871387e-06,
      "loss": 0.4618,
      "step": 2000
    },
    {
      "epoch": 0.4058044164037855,
      "grad_norm": 0.5037868351130359,
      "learning_rate": 7.4171625004673765e-06,
      "loss": 0.4549,
      "step": 2010
    },
    {
      "epoch": 0.40782334384858043,
      "grad_norm": 0.6617292048565804,
      "learning_rate": 7.386258270704318e-06,
      "loss": 0.4648,
      "step": 2020
    },
    {
      "epoch": 0.4098422712933754,
      "grad_norm": 0.46778481767707863,
      "learning_rate": 7.355235536325507e-06,
      "loss": 0.4618,
      "step": 2030
    },
    {
      "epoch": 0.41186119873817034,
      "grad_norm": 0.4467025867029599,
      "learning_rate": 7.32409583795932e-06,
      "loss": 0.4556,
      "step": 2040
    },
    {
      "epoch": 0.4138801261829653,
      "grad_norm": 0.4864826536631601,
      "learning_rate": 7.292840722042721e-06,
      "loss": 0.4649,
      "step": 2050
    },
    {
      "epoch": 0.41589905362776025,
      "grad_norm": 0.44674303304698915,
      "learning_rate": 7.261471740744453e-06,
      "loss": 0.4546,
      "step": 2060
    },
    {
      "epoch": 0.4179179810725552,
      "grad_norm": 0.4638264920705765,
      "learning_rate": 7.229990451887961e-06,
      "loss": 0.459,
      "step": 2070
    },
    {
      "epoch": 0.41993690851735016,
      "grad_norm": 0.44062835097290626,
      "learning_rate": 7.1983984188740215e-06,
      "loss": 0.4627,
      "step": 2080
    },
    {
      "epoch": 0.4219558359621451,
      "grad_norm": 0.4596889143174662,
      "learning_rate": 7.1666972106031105e-06,
      "loss": 0.4584,
      "step": 2090
    },
    {
      "epoch": 0.42397476340694007,
      "grad_norm": 0.4866244769293159,
      "learning_rate": 7.134888401397484e-06,
      "loss": 0.4629,
      "step": 2100
    },
    {
      "epoch": 0.425993690851735,
      "grad_norm": 0.5201676761862537,
      "learning_rate": 7.102973570922997e-06,
      "loss": 0.4549,
      "step": 2110
    },
    {
      "epoch": 0.42801261829653,
      "grad_norm": 0.4790871394995968,
      "learning_rate": 7.070954304110656e-06,
      "loss": 0.4632,
      "step": 2120
    },
    {
      "epoch": 0.4300315457413249,
      "grad_norm": 0.4659786396553266,
      "learning_rate": 7.03883219107791e-06,
      "loss": 0.4581,
      "step": 2130
    },
    {
      "epoch": 0.4320504731861199,
      "grad_norm": 0.4104121258537053,
      "learning_rate": 7.0066088270496775e-06,
      "loss": 0.4596,
      "step": 2140
    },
    {
      "epoch": 0.4340694006309148,
      "grad_norm": 0.46988504036618683,
      "learning_rate": 6.9742858122791355e-06,
      "loss": 0.4648,
      "step": 2150
    },
    {
      "epoch": 0.4360883280757098,
      "grad_norm": 0.6098242782979109,
      "learning_rate": 6.941864751968239e-06,
      "loss": 0.4548,
      "step": 2160
    },
    {
      "epoch": 0.4381072555205047,
      "grad_norm": 0.7557069029520974,
      "learning_rate": 6.90934725618801e-06,
      "loss": 0.461,
      "step": 2170
    },
    {
      "epoch": 0.4401261829652997,
      "grad_norm": 0.46196984209426056,
      "learning_rate": 6.876734939798584e-06,
      "loss": 0.4631,
      "step": 2180
    },
    {
      "epoch": 0.44214511041009463,
      "grad_norm": 0.45367052105694244,
      "learning_rate": 6.844029422368998e-06,
      "loss": 0.4567,
      "step": 2190
    },
    {
      "epoch": 0.4441640378548896,
      "grad_norm": 0.4923825492028847,
      "learning_rate": 6.811232328096778e-06,
      "loss": 0.4619,
      "step": 2200
    },
    {
      "epoch": 0.44618296529968454,
      "grad_norm": 0.4412882669817311,
      "learning_rate": 6.7783452857272725e-06,
      "loss": 0.4472,
      "step": 2210
    },
    {
      "epoch": 0.44820189274447947,
      "grad_norm": 0.4804805684578094,
      "learning_rate": 6.745369928472764e-06,
      "loss": 0.4634,
      "step": 2220
    },
    {
      "epoch": 0.45022082018927445,
      "grad_norm": 0.46765384977250785,
      "learning_rate": 6.712307893931364e-06,
      "loss": 0.4595,
      "step": 2230
    },
    {
      "epoch": 0.4522397476340694,
      "grad_norm": 1.0068029764875739,
      "learning_rate": 6.67916082400569e-06,
      "loss": 0.457,
      "step": 2240
    },
    {
      "epoch": 0.45425867507886436,
      "grad_norm": 0.4828558935559972,
      "learning_rate": 6.645930364821327e-06,
      "loss": 0.465,
      "step": 2250
    },
    {
      "epoch": 0.4562776025236593,
      "grad_norm": 0.4571716607551568,
      "learning_rate": 6.612618166645068e-06,
      "loss": 0.4521,
      "step": 2260
    },
    {
      "epoch": 0.45829652996845427,
      "grad_norm": 0.49891810921169605,
      "learning_rate": 6.579225883802981e-06,
      "loss": 0.4621,
      "step": 2270
    },
    {
      "epoch": 0.4603154574132492,
      "grad_norm": 0.6812817990255348,
      "learning_rate": 6.5457551745982275e-06,
      "loss": 0.4553,
      "step": 2280
    },
    {
      "epoch": 0.4623343848580442,
      "grad_norm": 0.466296009966539,
      "learning_rate": 6.512207701228727e-06,
      "loss": 0.4585,
      "step": 2290
    },
    {
      "epoch": 0.4643533123028391,
      "grad_norm": 0.48144166164862184,
      "learning_rate": 6.478585129704608e-06,
      "loss": 0.4579,
      "step": 2300
    },
    {
      "epoch": 0.4663722397476341,
      "grad_norm": 0.4575454070862058,
      "learning_rate": 6.444889129765465e-06,
      "loss": 0.453,
      "step": 2310
    },
    {
      "epoch": 0.468391167192429,
      "grad_norm": 0.5049212463992705,
      "learning_rate": 6.4111213747974385e-06,
      "loss": 0.4606,
      "step": 2320
    },
    {
      "epoch": 0.470410094637224,
      "grad_norm": 0.44196403700173553,
      "learning_rate": 6.377283541750123e-06,
      "loss": 0.4528,
      "step": 2330
    },
    {
      "epoch": 0.4724290220820189,
      "grad_norm": 0.44557725863878417,
      "learning_rate": 6.343377311053271e-06,
      "loss": 0.4614,
      "step": 2340
    },
    {
      "epoch": 0.4744479495268139,
      "grad_norm": 0.485138333868983,
      "learning_rate": 6.309404366533355e-06,
      "loss": 0.4662,
      "step": 2350
    },
    {
      "epoch": 0.47646687697160883,
      "grad_norm": 0.44703181568848005,
      "learning_rate": 6.275366395329934e-06,
      "loss": 0.4484,
      "step": 2360
    },
    {
      "epoch": 0.47848580441640376,
      "grad_norm": 0.46684239360571517,
      "learning_rate": 6.241265087811883e-06,
      "loss": 0.461,
      "step": 2370
    },
    {
      "epoch": 0.48050473186119874,
      "grad_norm": 0.4308686375763305,
      "learning_rate": 6.207102137493433e-06,
      "loss": 0.4572,
      "step": 2380
    },
    {
      "epoch": 0.48252365930599367,
      "grad_norm": 0.4349332573681559,
      "learning_rate": 6.172879240950075e-06,
      "loss": 0.4586,
      "step": 2390
    },
    {
      "epoch": 0.48454258675078865,
      "grad_norm": 0.4946893193107334,
      "learning_rate": 6.138598097734308e-06,
      "loss": 0.4613,
      "step": 2400
    },
    {
      "epoch": 0.4865615141955836,
      "grad_norm": 0.4278600145969151,
      "learning_rate": 6.104260410291237e-06,
      "loss": 0.4524,
      "step": 2410
    },
    {
      "epoch": 0.48858044164037856,
      "grad_norm": 0.438261930357501,
      "learning_rate": 6.069867883874017e-06,
      "loss": 0.4546,
      "step": 2420
    },
    {
      "epoch": 0.4905993690851735,
      "grad_norm": 0.4834328335496326,
      "learning_rate": 6.035422226459183e-06,
      "loss": 0.4601,
      "step": 2430
    },
    {
      "epoch": 0.49261829652996847,
      "grad_norm": 0.4560202224259225,
      "learning_rate": 6.000925148661822e-06,
      "loss": 0.4552,
      "step": 2440
    },
    {
      "epoch": 0.4946372239747634,
      "grad_norm": 0.46242134458299394,
      "learning_rate": 5.966378363650618e-06,
      "loss": 0.4534,
      "step": 2450
    },
    {
      "epoch": 0.4966561514195584,
      "grad_norm": 0.509798379798209,
      "learning_rate": 5.931783587062785e-06,
      "loss": 0.4478,
      "step": 2460
    },
    {
      "epoch": 0.4986750788643533,
      "grad_norm": 0.4916270732208669,
      "learning_rate": 5.897142536918854e-06,
      "loss": 0.4606,
      "step": 2470
    },
    {
      "epoch": 0.5006940063091483,
      "grad_norm": 0.4493237090311729,
      "learning_rate": 5.8624569335373625e-06,
      "loss": 0.4512,
      "step": 2480
    },
    {
      "epoch": 0.5027129337539432,
      "grad_norm": 0.4570244860395198,
      "learning_rate": 5.827728499449417e-06,
      "loss": 0.4503,
      "step": 2490
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 0.5044698787582333,
      "learning_rate": 5.792958959313155e-06,
      "loss": 0.4586,
      "step": 2500
    },
    {
      "epoch": 0.5067507886435331,
      "grad_norm": 0.4567238497826546,
      "learning_rate": 5.758150039828084e-06,
      "loss": 0.442,
      "step": 2510
    },
    {
      "epoch": 0.5087697160883281,
      "grad_norm": 0.49456514738525703,
      "learning_rate": 5.723303469649352e-06,
      "loss": 0.4524,
      "step": 2520
    },
    {
      "epoch": 0.510788643533123,
      "grad_norm": 0.46129063291343286,
      "learning_rate": 5.688420979301876e-06,
      "loss": 0.4533,
      "step": 2530
    },
    {
      "epoch": 0.512807570977918,
      "grad_norm": 0.46780830492612324,
      "learning_rate": 5.6535043010944226e-06,
      "loss": 0.4537,
      "step": 2540
    },
    {
      "epoch": 0.5148264984227129,
      "grad_norm": 0.49663726459668367,
      "learning_rate": 5.618555169033567e-06,
      "loss": 0.4624,
      "step": 2550
    },
    {
      "epoch": 0.5168454258675079,
      "grad_norm": 0.4323698197626643,
      "learning_rate": 5.583575318737588e-06,
      "loss": 0.4394,
      "step": 2560
    },
    {
      "epoch": 0.5188643533123029,
      "grad_norm": 0.433967581020834,
      "learning_rate": 5.548566487350264e-06,
      "loss": 0.4482,
      "step": 2570
    },
    {
      "epoch": 0.5208832807570978,
      "grad_norm": 0.5093937708469708,
      "learning_rate": 5.513530413454618e-06,
      "loss": 0.4488,
      "step": 2580
    },
    {
      "epoch": 0.5229022082018927,
      "grad_norm": 0.4479313579903872,
      "learning_rate": 5.478468836986568e-06,
      "loss": 0.4547,
      "step": 2590
    },
    {
      "epoch": 0.5249211356466877,
      "grad_norm": 0.4784953987086218,
      "learning_rate": 5.443383499148519e-06,
      "loss": 0.4536,
      "step": 2600
    },
    {
      "epoch": 0.5269400630914827,
      "grad_norm": 0.4311672354723456,
      "learning_rate": 5.408276142322904e-06,
      "loss": 0.4489,
      "step": 2610
    },
    {
      "epoch": 0.5289589905362776,
      "grad_norm": 0.4320377418884208,
      "learning_rate": 5.373148509985636e-06,
      "loss": 0.457,
      "step": 2620
    },
    {
      "epoch": 0.5309779179810725,
      "grad_norm": 0.4475608921551904,
      "learning_rate": 5.3380023466195455e-06,
      "loss": 0.4491,
      "step": 2630
    },
    {
      "epoch": 0.5329968454258676,
      "grad_norm": 0.4150312768857391,
      "learning_rate": 5.302839397627732e-06,
      "loss": 0.4509,
      "step": 2640
    },
    {
      "epoch": 0.5350157728706625,
      "grad_norm": 0.4842663915075593,
      "learning_rate": 5.267661409246893e-06,
      "loss": 0.4497,
      "step": 2650
    },
    {
      "epoch": 0.5370347003154574,
      "grad_norm": 0.48020608359904937,
      "learning_rate": 5.232470128460597e-06,
      "loss": 0.4455,
      "step": 2660
    },
    {
      "epoch": 0.5390536277602523,
      "grad_norm": 0.460861374110654,
      "learning_rate": 5.197267302912535e-06,
      "loss": 0.4633,
      "step": 2670
    },
    {
      "epoch": 0.5410725552050473,
      "grad_norm": 0.4861656655295904,
      "learning_rate": 5.162054680819724e-06,
      "loss": 0.455,
      "step": 2680
    },
    {
      "epoch": 0.5430914826498423,
      "grad_norm": 0.4280331150720691,
      "learning_rate": 5.12683401088569e-06,
      "loss": 0.4527,
      "step": 2690
    },
    {
      "epoch": 0.5451104100946372,
      "grad_norm": 0.4803934844859794,
      "learning_rate": 5.091607042213621e-06,
      "loss": 0.4507,
      "step": 2700
    },
    {
      "epoch": 0.5471293375394322,
      "grad_norm": 0.44679887041219724,
      "learning_rate": 5.056375524219516e-06,
      "loss": 0.4467,
      "step": 2710
    },
    {
      "epoch": 0.5491482649842271,
      "grad_norm": 0.4562693718725788,
      "learning_rate": 5.021141206545293e-06,
      "loss": 0.4481,
      "step": 2720
    },
    {
      "epoch": 0.5511671924290221,
      "grad_norm": 0.4568391909726876,
      "learning_rate": 4.9859058389719095e-06,
      "loss": 0.4467,
      "step": 2730
    },
    {
      "epoch": 0.553186119873817,
      "grad_norm": 0.4402113900197009,
      "learning_rate": 4.950671171332458e-06,
      "loss": 0.4438,
      "step": 2740
    },
    {
      "epoch": 0.555205047318612,
      "grad_norm": 0.4694901686129437,
      "learning_rate": 4.915438953425277e-06,
      "loss": 0.455,
      "step": 2750
    },
    {
      "epoch": 0.5572239747634069,
      "grad_norm": 0.5092377388071306,
      "learning_rate": 4.8802109349270415e-06,
      "loss": 0.4431,
      "step": 2760
    },
    {
      "epoch": 0.5592429022082019,
      "grad_norm": 0.5334478668234227,
      "learning_rate": 4.844988865305883e-06,
      "loss": 0.4457,
      "step": 2770
    },
    {
      "epoch": 0.5612618296529969,
      "grad_norm": 0.450190134489615,
      "learning_rate": 4.809774493734507e-06,
      "loss": 0.4468,
      "step": 2780
    },
    {
      "epoch": 0.5632807570977918,
      "grad_norm": 0.6136460189487958,
      "learning_rate": 4.774569569003314e-06,
      "loss": 0.4531,
      "step": 2790
    },
    {
      "epoch": 0.5652996845425867,
      "grad_norm": 0.47669716943494894,
      "learning_rate": 4.739375839433575e-06,
      "loss": 0.4559,
      "step": 2800
    },
    {
      "epoch": 0.5673186119873816,
      "grad_norm": 0.47396268716032475,
      "learning_rate": 4.704195052790584e-06,
      "loss": 0.4463,
      "step": 2810
    },
    {
      "epoch": 0.5693375394321767,
      "grad_norm": 0.5115114860936024,
      "learning_rate": 4.66902895619688e-06,
      "loss": 0.4494,
      "step": 2820
    },
    {
      "epoch": 0.5713564668769716,
      "grad_norm": 0.4561358589231192,
      "learning_rate": 4.633879296045474e-06,
      "loss": 0.4477,
      "step": 2830
    },
    {
      "epoch": 0.5733753943217665,
      "grad_norm": 0.4547946639578122,
      "learning_rate": 4.59874781791312e-06,
      "loss": 0.4502,
      "step": 2840
    },
    {
      "epoch": 0.5753943217665615,
      "grad_norm": 0.5036899795994354,
      "learning_rate": 4.56363626647363e-06,
      "loss": 0.4553,
      "step": 2850
    },
    {
      "epoch": 0.5774132492113565,
      "grad_norm": 0.46515739613875146,
      "learning_rate": 4.528546385411239e-06,
      "loss": 0.4373,
      "step": 2860
    },
    {
      "epoch": 0.5794321766561514,
      "grad_norm": 0.4834989187992637,
      "learning_rate": 4.493479917333989e-06,
      "loss": 0.453,
      "step": 2870
    },
    {
      "epoch": 0.5814511041009464,
      "grad_norm": 0.45922181466381384,
      "learning_rate": 4.4584386036872155e-06,
      "loss": 0.4527,
      "step": 2880
    },
    {
      "epoch": 0.5834700315457413,
      "grad_norm": 0.46803871925708684,
      "learning_rate": 4.42342418466705e-06,
      "loss": 0.4426,
      "step": 2890
    },
    {
      "epoch": 0.5854889589905363,
      "grad_norm": 0.47319916436776355,
      "learning_rate": 4.388438399134e-06,
      "loss": 0.4547,
      "step": 2900
    },
    {
      "epoch": 0.5875078864353313,
      "grad_norm": 0.4598569796278221,
      "learning_rate": 4.3534829845265996e-06,
      "loss": 0.4441,
      "step": 2910
    },
    {
      "epoch": 0.5895268138801262,
      "grad_norm": 0.47100191118178447,
      "learning_rate": 4.318559676775126e-06,
      "loss": 0.4472,
      "step": 2920
    },
    {
      "epoch": 0.5915457413249211,
      "grad_norm": 0.495923691841689,
      "learning_rate": 4.283670210215384e-06,
      "loss": 0.4516,
      "step": 2930
    },
    {
      "epoch": 0.5935646687697161,
      "grad_norm": 0.4391764043906036,
      "learning_rate": 4.248816317502585e-06,
      "loss": 0.4468,
      "step": 2940
    },
    {
      "epoch": 0.5955835962145111,
      "grad_norm": 0.47900304182889214,
      "learning_rate": 4.213999729525304e-06,
      "loss": 0.452,
      "step": 2950
    },
    {
      "epoch": 0.597602523659306,
      "grad_norm": 0.4894376668991494,
      "learning_rate": 4.179222175319502e-06,
      "loss": 0.4418,
      "step": 2960
    },
    {
      "epoch": 0.5996214511041009,
      "grad_norm": 0.4698266233405637,
      "learning_rate": 4.1444853819826845e-06,
      "loss": 0.4503,
      "step": 2970
    },
    {
      "epoch": 0.6016403785488958,
      "grad_norm": 0.4601994675920038,
      "learning_rate": 4.1097910745881175e-06,
      "loss": 0.4421,
      "step": 2980
    },
    {
      "epoch": 0.6036593059936909,
      "grad_norm": 0.6167906848346414,
      "learning_rate": 4.0751409760991585e-06,
      "loss": 0.4469,
      "step": 2990
    },
    {
      "epoch": 0.6056782334384858,
      "grad_norm": 0.5051790895352023,
      "learning_rate": 4.040536807283696e-06,
      "loss": 0.4502,
      "step": 3000
    },
    {
      "epoch": 0.6076971608832807,
      "grad_norm": 0.45322450050363433,
      "learning_rate": 4.005980286628698e-06,
      "loss": 0.4505,
      "step": 3010
    },
    {
      "epoch": 0.6097160883280757,
      "grad_norm": 0.4910869337898713,
      "learning_rate": 3.9714731302548525e-06,
      "loss": 0.4486,
      "step": 3020
    },
    {
      "epoch": 0.6117350157728707,
      "grad_norm": 0.447631013542978,
      "learning_rate": 3.937017051831367e-06,
      "loss": 0.4478,
      "step": 3030
    },
    {
      "epoch": 0.6137539432176656,
      "grad_norm": 0.7018069369756337,
      "learning_rate": 3.9026137624908466e-06,
      "loss": 0.4492,
      "step": 3040
    },
    {
      "epoch": 0.6157728706624606,
      "grad_norm": 0.4762076148803905,
      "learning_rate": 3.868264970744324e-06,
      "loss": 0.4513,
      "step": 3050
    },
    {
      "epoch": 0.6177917981072555,
      "grad_norm": 0.5020509623933802,
      "learning_rate": 3.833972382396412e-06,
      "loss": 0.4482,
      "step": 3060
    },
    {
      "epoch": 0.6198107255520505,
      "grad_norm": 0.44347011131653674,
      "learning_rate": 3.799737700460594e-06,
      "loss": 0.4441,
      "step": 3070
    },
    {
      "epoch": 0.6218296529968455,
      "grad_norm": 0.45353690385677753,
      "learning_rate": 3.7655626250746446e-06,
      "loss": 0.4443,
      "step": 3080
    },
    {
      "epoch": 0.6238485804416404,
      "grad_norm": 0.44404552147051596,
      "learning_rate": 3.7314488534162007e-06,
      "loss": 0.4509,
      "step": 3090
    },
    {
      "epoch": 0.6258675078864353,
      "grad_norm": 0.4759693579595236,
      "learning_rate": 3.6973980796184843e-06,
      "loss": 0.4573,
      "step": 3100
    },
    {
      "epoch": 0.6278864353312302,
      "grad_norm": 0.45248182104327295,
      "learning_rate": 3.6634119946861556e-06,
      "loss": 0.4442,
      "step": 3110
    },
    {
      "epoch": 0.6299053627760253,
      "grad_norm": 0.44605725901650006,
      "learning_rate": 3.629492286411349e-06,
      "loss": 0.4515,
      "step": 3120
    },
    {
      "epoch": 0.6319242902208202,
      "grad_norm": 0.4770842161429305,
      "learning_rate": 3.5956406392898484e-06,
      "loss": 0.4481,
      "step": 3130
    },
    {
      "epoch": 0.6339432176656151,
      "grad_norm": 0.4611511904942963,
      "learning_rate": 3.561858734437432e-06,
      "loss": 0.4553,
      "step": 3140
    },
    {
      "epoch": 0.63596214511041,
      "grad_norm": 0.4886490480648605,
      "learning_rate": 3.528148249506391e-06,
      "loss": 0.4541,
      "step": 3150
    },
    {
      "epoch": 0.6379810725552051,
      "grad_norm": 0.5063011038652176,
      "learning_rate": 3.494510858602217e-06,
      "loss": 0.448,
      "step": 3160
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.5297665998286369,
      "learning_rate": 3.4609482322004463e-06,
      "loss": 0.45,
      "step": 3170
    },
    {
      "epoch": 0.6420189274447949,
      "grad_norm": 0.789602090268308,
      "learning_rate": 3.427462037063729e-06,
      "loss": 0.4499,
      "step": 3180
    },
    {
      "epoch": 0.6440378548895899,
      "grad_norm": 0.4595511607863831,
      "learning_rate": 3.394053936159035e-06,
      "loss": 0.4451,
      "step": 3190
    },
    {
      "epoch": 0.6460567823343849,
      "grad_norm": 0.5053416927018038,
      "learning_rate": 3.3607255885750756e-06,
      "loss": 0.4488,
      "step": 3200
    },
    {
      "epoch": 0.6480757097791798,
      "grad_norm": 0.47050871567377006,
      "learning_rate": 3.3274786494399102e-06,
      "loss": 0.4461,
      "step": 3210
    },
    {
      "epoch": 0.6500946372239748,
      "grad_norm": 0.46433780403203007,
      "learning_rate": 3.2943147698387562e-06,
      "loss": 0.4497,
      "step": 3220
    },
    {
      "epoch": 0.6521135646687697,
      "grad_norm": 0.45409078162209787,
      "learning_rate": 3.261235596731982e-06,
      "loss": 0.4457,
      "step": 3230
    },
    {
      "epoch": 0.6541324921135647,
      "grad_norm": 0.4218245687222197,
      "learning_rate": 3.228242772873328e-06,
      "loss": 0.4437,
      "step": 3240
    },
    {
      "epoch": 0.6561514195583596,
      "grad_norm": 0.47127385494827945,
      "learning_rate": 3.195337936728323e-06,
      "loss": 0.4488,
      "step": 3250
    },
    {
      "epoch": 0.6581703470031546,
      "grad_norm": 0.43568607166498186,
      "learning_rate": 3.1625227223929105e-06,
      "loss": 0.4415,
      "step": 3260
    },
    {
      "epoch": 0.6601892744479495,
      "grad_norm": 0.4200623699882891,
      "learning_rate": 3.1297987595123024e-06,
      "loss": 0.4423,
      "step": 3270
    },
    {
      "epoch": 0.6622082018927444,
      "grad_norm": 0.45902924831849934,
      "learning_rate": 3.0971676732000494e-06,
      "loss": 0.4488,
      "step": 3280
    },
    {
      "epoch": 0.6642271293375395,
      "grad_norm": 0.48610535709106045,
      "learning_rate": 3.0646310839573302e-06,
      "loss": 0.441,
      "step": 3290
    },
    {
      "epoch": 0.6662460567823344,
      "grad_norm": 0.5237594196219926,
      "learning_rate": 3.03219060759248e-06,
      "loss": 0.454,
      "step": 3300
    },
    {
      "epoch": 0.6682649842271293,
      "grad_norm": 0.4228282553579445,
      "learning_rate": 2.9998478551407524e-06,
      "loss": 0.4378,
      "step": 3310
    },
    {
      "epoch": 0.6702839116719242,
      "grad_norm": 0.46926328208349977,
      "learning_rate": 2.9676044327842963e-06,
      "loss": 0.4475,
      "step": 3320
    },
    {
      "epoch": 0.6723028391167193,
      "grad_norm": 0.47029450291352376,
      "learning_rate": 2.935461941772412e-06,
      "loss": 0.4418,
      "step": 3330
    },
    {
      "epoch": 0.6743217665615142,
      "grad_norm": 0.461936788764074,
      "learning_rate": 2.9034219783420183e-06,
      "loss": 0.438,
      "step": 3340
    },
    {
      "epoch": 0.6763406940063091,
      "grad_norm": 0.4879384251763818,
      "learning_rate": 2.871486133638378e-06,
      "loss": 0.4498,
      "step": 3350
    },
    {
      "epoch": 0.6783596214511041,
      "grad_norm": 0.47390435742643133,
      "learning_rate": 2.8396559936360934e-06,
      "loss": 0.4444,
      "step": 3360
    },
    {
      "epoch": 0.6803785488958991,
      "grad_norm": 0.43781719559966065,
      "learning_rate": 2.807933139060337e-06,
      "loss": 0.4481,
      "step": 3370
    },
    {
      "epoch": 0.682397476340694,
      "grad_norm": 0.452166890113836,
      "learning_rate": 2.776319145308345e-06,
| "loss": 0.4397, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.684416403785489, | |
| "grad_norm": 0.4480696892989974, | |
| "learning_rate": 2.7448155823711897e-06, | |
| "loss": 0.4415, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.6864353312302839, | |
| "grad_norm": 0.482631983733039, | |
| "learning_rate": 2.7134240147558124e-06, | |
| "loss": 0.4494, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.6884542586750788, | |
| "grad_norm": 0.4215351521880855, | |
| "learning_rate": 2.6821460014073187e-06, | |
| "loss": 0.4359, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.6904731861198738, | |
| "grad_norm": 0.44170545621316426, | |
| "learning_rate": 2.6509830956315684e-06, | |
| "loss": 0.4516, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.6924921135646688, | |
| "grad_norm": 0.43846533524679027, | |
| "learning_rate": 2.619936845018031e-06, | |
| "loss": 0.4357, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.6945110410094637, | |
| "grad_norm": 0.5008934216933979, | |
| "learning_rate": 2.5890087913629358e-06, | |
| "loss": 0.4467, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.6965299684542586, | |
| "grad_norm": 0.5250213600953301, | |
| "learning_rate": 2.558200470592701e-06, | |
| "loss": 0.449, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.6985488958990537, | |
| "grad_norm": 0.5362972001626155, | |
| "learning_rate": 2.527513412687654e-06, | |
| "loss": 0.436, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.7005678233438486, | |
| "grad_norm": 0.4447368579357635, | |
| "learning_rate": 2.4969491416060542e-06, | |
| "loss": 0.4393, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.7025867507886435, | |
| "grad_norm": 0.42761346324330285, | |
| "learning_rate": 2.4665091752084175e-06, | |
| "loss": 0.4423, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.7046056782334384, | |
| "grad_norm": 0.45180644732668995, | |
| "learning_rate": 2.4361950251821253e-06, | |
| "loss": 0.4351, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.7066246056782335, | |
| "grad_norm": 0.4655597774869552, | |
| "learning_rate": 2.406008196966359e-06, | |
| "loss": 0.445, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.7086435331230284, | |
| "grad_norm": 0.44607134328593223, | |
| "learning_rate": 2.375950189677342e-06, | |
| "loss": 0.4322, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.7106624605678233, | |
| "grad_norm": 0.45886523824853853, | |
| "learning_rate": 2.346022496033879e-06, | |
| "loss": 0.439, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.7126813880126183, | |
| "grad_norm": 0.439206301308698, | |
| "learning_rate": 2.316226602283243e-06, | |
| "loss": 0.445, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.7147003154574133, | |
| "grad_norm": 0.42444173885080955, | |
| "learning_rate": 2.2865639881273494e-06, | |
| "loss": 0.4402, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.7167192429022082, | |
| "grad_norm": 0.48281990057391616, | |
| "learning_rate": 2.25703612664928e-06, | |
| "loss": 0.445, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.7187381703470032, | |
| "grad_norm": 0.46127829547820787, | |
| "learning_rate": 2.227644484240135e-06, | |
| "loss": 0.4356, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.7207570977917981, | |
| "grad_norm": 0.4610423355631118, | |
| "learning_rate": 2.198390520526194e-06, | |
| "loss": 0.4438, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.722776025236593, | |
| "grad_norm": 0.4711885745663891, | |
| "learning_rate": 2.1692756882964404e-06, | |
| "loss": 0.4464, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.724794952681388, | |
| "grad_norm": 0.5420237747030388, | |
| "learning_rate": 2.140301433430414e-06, | |
| "loss": 0.4436, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.726813880126183, | |
| "grad_norm": 0.47745493287406926, | |
| "learning_rate": 2.1114691948264038e-06, | |
| "loss": 0.4433, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.7288328075709779, | |
| "grad_norm": 0.43683824200923127, | |
| "learning_rate": 2.082780404329988e-06, | |
| "loss": 0.4429, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.7308517350157728, | |
| "grad_norm": 0.42141576682539833, | |
| "learning_rate": 2.0542364866629292e-06, | |
| "loss": 0.4402, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.7328706624605679, | |
| "grad_norm": 0.4304526228372951, | |
| "learning_rate": 2.025838859352428e-06, | |
| "loss": 0.4462, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.7348895899053628, | |
| "grad_norm": 0.4960972537231228, | |
| "learning_rate": 1.997588932660713e-06, | |
| "loss": 0.4298, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.7369085173501577, | |
| "grad_norm": 0.5055917828329974, | |
| "learning_rate": 1.9694881095150125e-06, | |
| "loss": 0.4429, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.7389274447949526, | |
| "grad_norm": 0.4756073151541865, | |
| "learning_rate": 1.9415377854378903e-06, | |
| "loss": 0.4423, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.7409463722397477, | |
| "grad_norm": 0.4293238953341605, | |
| "learning_rate": 1.913739348477927e-06, | |
| "loss": 0.444, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.7429652996845426, | |
| "grad_norm": 0.4476184732471472, | |
| "learning_rate": 1.8860941791408038e-06, | |
| "loss": 0.4432, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.7449842271293375, | |
| "grad_norm": 0.4453341449071723, | |
| "learning_rate": 1.8586036503207316e-06, | |
| "loss": 0.4351, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.7470031545741325, | |
| "grad_norm": 0.4741404781356218, | |
| "learning_rate": 1.8312691272322768e-06, | |
| "loss": 0.4445, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.7490220820189274, | |
| "grad_norm": 0.4314595245056323, | |
| "learning_rate": 1.804091967342569e-06, | |
| "loss": 0.4392, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.7510410094637224, | |
| "grad_norm": 0.4960471983973586, | |
| "learning_rate": 1.777073520303874e-06, | |
| "loss": 0.4406, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.7530599369085174, | |
| "grad_norm": 0.47724657720097824, | |
| "learning_rate": 1.7502151278865798e-06, | |
| "loss": 0.4391, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.7550788643533123, | |
| "grad_norm": 0.45566327509907684, | |
| "learning_rate": 1.7235181239125587e-06, | |
| "loss": 0.4411, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.7570977917981072, | |
| "grad_norm": 0.47859031492685, | |
| "learning_rate": 1.6969838341889288e-06, | |
| "loss": 0.4412, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.7591167192429022, | |
| "grad_norm": 0.4239970850626806, | |
| "learning_rate": 1.67061357644221e-06, | |
| "loss": 0.4327, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.7611356466876972, | |
| "grad_norm": 0.41439508221708543, | |
| "learning_rate": 1.6444086602528835e-06, | |
| "loss": 0.4403, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.7631545741324921, | |
| "grad_norm": 0.4724942386548484, | |
| "learning_rate": 1.6183703869903666e-06, | |
| "loss": 0.4399, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.765173501577287, | |
| "grad_norm": 0.5068083200686946, | |
| "learning_rate": 1.59250004974837e-06, | |
| "loss": 0.4484, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.7671924290220821, | |
| "grad_norm": 0.47623913375530136, | |
| "learning_rate": 1.5667989332806894e-06, | |
| "loss": 0.442, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.769211356466877, | |
| "grad_norm": 0.5448383844625657, | |
| "learning_rate": 1.5412683139374068e-06, | |
| "loss": 0.4339, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.7712302839116719, | |
| "grad_norm": 0.4819612793724808, | |
| "learning_rate": 1.5159094596014934e-06, | |
| "loss": 0.4478, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.7732492113564668, | |
| "grad_norm": 0.4456007417706316, | |
| "learning_rate": 1.4907236296258598e-06, | |
| "loss": 0.4423, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.7752681388012618, | |
| "grad_norm": 0.43979298393219013, | |
| "learning_rate": 1.4657120747708014e-06, | |
| "loss": 0.4417, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.7772870662460568, | |
| "grad_norm": 0.5066920888924865, | |
| "learning_rate": 1.4408760371418895e-06, | |
| "loss": 0.4397, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.7793059936908517, | |
| "grad_norm": 0.47046124228822955, | |
| "learning_rate": 1.4162167501282925e-06, | |
| "loss": 0.4416, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.7813249211356467, | |
| "grad_norm": 0.4558898604297212, | |
| "learning_rate": 1.391735438341514e-06, | |
| "loss": 0.437, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.7833438485804416, | |
| "grad_norm": 0.4635342780752439, | |
| "learning_rate": 1.3674333175545818e-06, | |
| "loss": 0.4425, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.7853627760252366, | |
| "grad_norm": 0.450134125379297, | |
| "learning_rate": 1.343311594641674e-06, | |
| "loss": 0.4371, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.7873817034700316, | |
| "grad_norm": 0.48501901287598154, | |
| "learning_rate": 1.3193714675181813e-06, | |
| "loss": 0.4428, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.7894006309148265, | |
| "grad_norm": 0.41955405495588977, | |
| "learning_rate": 1.2956141250812144e-06, | |
| "loss": 0.4307, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.7914195583596214, | |
| "grad_norm": 0.41659445986128424, | |
| "learning_rate": 1.272040747150563e-06, | |
| "loss": 0.447, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.7934384858044164, | |
| "grad_norm": 0.44707673152149113, | |
| "learning_rate": 1.2486525044101123e-06, | |
| "loss": 0.4333, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.7954574132492114, | |
| "grad_norm": 0.4698766268506151, | |
| "learning_rate": 1.2254505583496922e-06, | |
| "loss": 0.4403, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.7974763406940063, | |
| "grad_norm": 0.47512471796296707, | |
| "learning_rate": 1.2024360612074055e-06, | |
| "loss": 0.4454, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.7994952681388012, | |
| "grad_norm": 0.4585910400457452, | |
| "learning_rate": 1.179610155912405e-06, | |
| "loss": 0.436, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.8015141955835963, | |
| "grad_norm": 0.4557389746705491, | |
| "learning_rate": 1.1569739760281295e-06, | |
| "loss": 0.4393, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.8035331230283912, | |
| "grad_norm": 0.4553106102818403, | |
| "learning_rate": 1.1345286456960186e-06, | |
| "loss": 0.4439, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.8055520504731861, | |
| "grad_norm": 0.4443887135897382, | |
| "learning_rate": 1.1122752795796748e-06, | |
| "loss": 0.4413, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.807570977917981, | |
| "grad_norm": 0.47272003456758904, | |
| "learning_rate": 1.0902149828095154e-06, | |
| "loss": 0.4422, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.809589905362776, | |
| "grad_norm": 0.43703108025040205, | |
| "learning_rate": 1.0683488509278928e-06, | |
| "loss": 0.4386, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.811608832807571, | |
| "grad_norm": 0.4441202945224273, | |
| "learning_rate": 1.0466779698346797e-06, | |
| "loss": 0.4363, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.8136277602523659, | |
| "grad_norm": 0.42622028909468485, | |
| "learning_rate": 1.025203415733348e-06, | |
| "loss": 0.4365, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.8156466876971609, | |
| "grad_norm": 0.440821020950604, | |
| "learning_rate": 1.0039262550775235e-06, | |
| "loss": 0.4345, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.8176656151419558, | |
| "grad_norm": 0.4670601139166515, | |
| "learning_rate": 9.828475445180212e-07, | |
| "loss": 0.4404, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.8196845425867508, | |
| "grad_norm": 0.4363041969561276, | |
| "learning_rate": 9.61968330850374e-07, | |
| "loss": 0.4369, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.8217034700315458, | |
| "grad_norm": 0.4866069797752605, | |
| "learning_rate": 9.412896509628439e-07, | |
| "loss": 0.4434, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.8237223974763407, | |
| "grad_norm": 0.4360948594973755, | |
| "learning_rate": 9.208125317849304e-07, | |
| "loss": 0.4395, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.8257413249211356, | |
| "grad_norm": 0.4490621547393745, | |
| "learning_rate": 9.005379902363753e-07, | |
| "loss": 0.4405, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.8277602523659306, | |
| "grad_norm": 0.4809678412101839, | |
| "learning_rate": 8.804670331766557e-07, | |
| "loss": 0.4449, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.8297791798107256, | |
| "grad_norm": 0.4626290445825763, | |
| "learning_rate": 8.606006573549836e-07, | |
| "loss": 0.4359, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.8317981072555205, | |
| "grad_norm": 0.5187638972410576, | |
| "learning_rate": 8.409398493608089e-07, | |
| "loss": 0.4413, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.8338170347003154, | |
| "grad_norm": 0.46587792642977804, | |
| "learning_rate": 8.214855855748249e-07, | |
| "loss": 0.4409, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.8358359621451104, | |
| "grad_norm": 0.4706901825283824, | |
| "learning_rate": 8.022388321204688e-07, | |
| "loss": 0.4354, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.8378548895899054, | |
| "grad_norm": 0.562791948402191, | |
| "learning_rate": 7.832005448159585e-07, | |
| "loss": 0.4445, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.8398738170347003, | |
| "grad_norm": 0.4543738469403552, | |
| "learning_rate": 7.643716691268171e-07, | |
| "loss": 0.4374, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.8418927444794952, | |
| "grad_norm": 1.6174532184269235, | |
| "learning_rate": 7.457531401189188e-07, | |
| "loss": 0.4408, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.8439116719242902, | |
| "grad_norm": 0.46922997370194347, | |
| "learning_rate": 7.273458824120555e-07, | |
| "loss": 0.4376, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.8459305993690852, | |
| "grad_norm": 0.44210687899012985, | |
| "learning_rate": 7.091508101340189e-07, | |
| "loss": 0.4407, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.8479495268138801, | |
| "grad_norm": 0.488809217492704, | |
| "learning_rate": 6.91168826875201e-07, | |
| "loss": 0.4489, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.8499684542586751, | |
| "grad_norm": 0.4335482219101724, | |
| "learning_rate": 6.734008256437253e-07, | |
| "loss": 0.435, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.85198738170347, | |
| "grad_norm": 0.4549494452467777, | |
| "learning_rate": 6.558476888210935e-07, | |
| "loss": 0.4375, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.854006309148265, | |
| "grad_norm": 0.44896647048001603, | |
| "learning_rate": 6.38510288118368e-07, | |
| "loss": 0.4409, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.85602523659306, | |
| "grad_norm": 0.43685823117883865, | |
| "learning_rate": 6.213894845328844e-07, | |
| "loss": 0.4418, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.8580441640378549, | |
| "grad_norm": 0.47792243578520655, | |
| "learning_rate": 6.044861283054876e-07, | |
| "loss": 0.4452, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.8600630914826498, | |
| "grad_norm": 0.43885235740502504, | |
| "learning_rate": 5.878010588783101e-07, | |
| "loss": 0.4423, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.8620820189274448, | |
| "grad_norm": 0.5121668333108534, | |
| "learning_rate": 5.713351048530874e-07, | |
| "loss": 0.4429, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.8641009463722398, | |
| "grad_norm": 0.44403508398564756, | |
| "learning_rate": 5.550890839500067e-07, | |
| "loss": 0.4511, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.8661198738170347, | |
| "grad_norm": 0.42769598938785636, | |
| "learning_rate": 5.390638029670908e-07, | |
| "loss": 0.4322, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.8681388012618296, | |
| "grad_norm": 0.4949663561795469, | |
| "learning_rate": 5.232600577401436e-07, | |
| "loss": 0.4457, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.8701577287066246, | |
| "grad_norm": 0.4489314407451146, | |
| "learning_rate": 5.076786331032224e-07, | |
| "loss": 0.4327, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.8721766561514196, | |
| "grad_norm": 0.48025463446625855, | |
| "learning_rate": 4.923203028496576e-07, | |
| "loss": 0.4447, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.8741955835962145, | |
| "grad_norm": 0.5743773868003562, | |
| "learning_rate": 4.771858296936311e-07, | |
| "loss": 0.4402, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.8762145110410094, | |
| "grad_norm": 0.4329672101913256, | |
| "learning_rate": 4.622759652322989e-07, | |
| "loss": 0.4455, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.8782334384858044, | |
| "grad_norm": 0.49048963795844497, | |
| "learning_rate": 4.475914499084605e-07, | |
| "loss": 0.4467, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.8802523659305994, | |
| "grad_norm": 0.4829611101804599, | |
| "learning_rate": 4.331330129737954e-07, | |
| "loss": 0.4287, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.8822712933753943, | |
| "grad_norm": 0.44140910883095635, | |
| "learning_rate": 4.189013724526392e-07, | |
| "loss": 0.4446, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.8842902208201893, | |
| "grad_norm": 0.45272062627566334, | |
| "learning_rate": 4.048972351063313e-07, | |
| "loss": 0.4354, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.8863091482649842, | |
| "grad_norm": 0.7049959167129851, | |
| "learning_rate": 3.9112129639811535e-07, | |
| "loss": 0.4346, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.8883280757097792, | |
| "grad_norm": 0.4811238411002169, | |
| "learning_rate": 3.7757424045860025e-07, | |
| "loss": 0.4372, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.8903470031545742, | |
| "grad_norm": 0.4540483251684103, | |
| "learning_rate": 3.642567400517838e-07, | |
| "loss": 0.4314, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.8923659305993691, | |
| "grad_norm": 0.47603601308214405, | |
| "learning_rate": 3.511694565416485e-07, | |
| "loss": 0.4382, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.894384858044164, | |
| "grad_norm": 0.4286142580967842, | |
| "learning_rate": 3.383130398593121e-07, | |
| "loss": 0.4399, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.8964037854889589, | |
| "grad_norm": 0.4337017987281163, | |
| "learning_rate": 3.2568812847074913e-07, | |
| "loss": 0.4437, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.898422712933754, | |
| "grad_norm": 0.48386529314250487, | |
| "learning_rate": 3.1329534934509186e-07, | |
| "loss": 0.4415, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.9004416403785489, | |
| "grad_norm": 0.46990087367597777, | |
| "learning_rate": 3.011353179234888e-07, | |
| "loss": 0.4317, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.9024605678233438, | |
| "grad_norm": 0.42506034975479673, | |
| "learning_rate": 2.8920863808853947e-07, | |
| "loss": 0.444, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.9044794952681388, | |
| "grad_norm": 0.6216117310744336, | |
| "learning_rate": 2.775159021343082e-07, | |
| "loss": 0.4367, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.9064984227129338, | |
| "grad_norm": 0.4583995655944248, | |
| "learning_rate": 2.660576907369111e-07, | |
| "loss": 0.4412, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.9085173501577287, | |
| "grad_norm": 0.4889160958013681, | |
| "learning_rate": 2.5483457292567517e-07, | |
| "loss": 0.4403, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.9105362776025236, | |
| "grad_norm": 0.4267838945833425, | |
| "learning_rate": 2.4384710605488195e-07, | |
| "loss": 0.4402, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.9125552050473186, | |
| "grad_norm": 0.4986776395611631, | |
| "learning_rate": 2.330958357760882e-07, | |
| "loss": 0.4399, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.9145741324921136, | |
| "grad_norm": 0.459784020601065, | |
| "learning_rate": 2.225812960110263e-07, | |
| "loss": 0.4316, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.9165930599369085, | |
| "grad_norm": 0.44426443349474987, | |
| "learning_rate": 2.123040089250944e-07, | |
| "loss": 0.4402, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.9186119873817035, | |
| "grad_norm": 0.49235316049109906, | |
| "learning_rate": 2.0226448490141715e-07, | |
| "loss": 0.4481, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.9206309148264984, | |
| "grad_norm": 0.43058790479448, | |
| "learning_rate": 1.9246322251550653e-07, | |
| "loss": 0.4317, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.9226498422712934, | |
| "grad_norm": 0.4966838385984267, | |
| "learning_rate": 1.829007085104978e-07, | |
| "loss": 0.4427, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.9246687697160884, | |
| "grad_norm": 0.4666275183152453, | |
| "learning_rate": 1.7357741777297987e-07, | |
| "loss": 0.4485, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.9266876971608833, | |
| "grad_norm": 0.4412830191687912, | |
| "learning_rate": 1.644938133094076e-07, | |
| "loss": 0.4312, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.9287066246056782, | |
| "grad_norm": 0.4886548137136921, | |
| "learning_rate": 1.5565034622311349e-07, | |
| "loss": 0.4461, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.9307255520504731, | |
| "grad_norm": 0.4652701957357695, | |
| "learning_rate": 1.4704745569190327e-07, | |
| "loss": 0.4375, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.9327444794952682, | |
| "grad_norm": 0.5298674695548953, | |
| "learning_rate": 1.3868556894624353e-07, | |
| "loss": 0.4399, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.9347634069400631, | |
| "grad_norm": 0.4562802443840814, | |
| "learning_rate": 1.30565101248048e-07, | |
| "loss": 0.4385, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.936782334384858, | |
| "grad_norm": 0.4578530629768795, | |
| "learning_rate": 1.226864558700541e-07, | |
| "loss": 0.4416, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.938801261829653, | |
| "grad_norm": 0.4951660779551572, | |
| "learning_rate": 1.1505002407579512e-07, | |
| "loss": 0.4424, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.940820189274448, | |
| "grad_norm": 0.44757841438265406, | |
| "learning_rate": 1.0765618510017118e-07, | |
| "loss": 0.4368, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.9428391167192429, | |
| "grad_norm": 1.5035499500846865, | |
| "learning_rate": 1.0050530613061327e-07, | |
| "loss": 0.4353, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.9448580441640378, | |
| "grad_norm": 0.4938389329390479, | |
| "learning_rate": 9.359774228885177e-08, | |
| "loss": 0.4391, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.9468769716088328, | |
| "grad_norm": 0.4711850612472287, | |
| "learning_rate": 8.693383661327882e-08, | |
| "loss": 0.4388, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.9488958990536278, | |
| "grad_norm": 0.48652374519922575, | |
| "learning_rate": 8.051392004191139e-08, | |
| "loss": 0.4486, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.9509148264984227, | |
| "grad_norm": 0.4473704324045684, | |
| "learning_rate": 7.433831139595727e-08, | |
| "loss": 0.4385, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.9529337539432177, | |
| "grad_norm": 0.509944701555344, | |
| "learning_rate": 6.840731736398542e-08, | |
| "loss": 0.4362, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.9549526813880126, | |
| "grad_norm": 0.4468123798191573, | |
| "learning_rate": 6.272123248668993e-08, | |
| "loss": 0.4356, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.9569716088328075, | |
| "grad_norm": 0.4432856047100861, | |
| "learning_rate": 5.728033914226494e-08, | |
| "loss": 0.4406, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.9589905362776026, | |
| "grad_norm": 0.8312641118207356, | |
| "learning_rate": 5.2084907532383156e-08, | |
| "loss": 0.4457, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.9610094637223975, | |
| "grad_norm": 0.4308428180533281, | |
| "learning_rate": 4.7135195668776e-08, | |
| "loss": 0.4337, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.9630283911671924, | |
| "grad_norm": 0.47706748470749133, | |
| "learning_rate": 4.243144936041943e-08, | |
| "loss": 0.4413, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.9650473186119873, | |
| "grad_norm": 0.46426542825118344, | |
| "learning_rate": 3.797390220132757e-08, | |
| "loss": 0.4325, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.9670662460567824, | |
| "grad_norm": 0.4324982289321672, | |
| "learning_rate": 3.3762775558952576e-08, | |
| "loss": 0.4377, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.9690851735015773, | |
| "grad_norm": 0.47719223462129745, | |
| "learning_rate": 2.9798278563190618e-08, | |
| "loss": 0.4439, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.9711041009463722, | |
| "grad_norm": 0.4540530080947163, | |
| "learning_rate": 2.6080608095997438e-08, | |
| "loss": 0.4377, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.9731230283911672, | |
| "grad_norm": 0.4413909184803219, | |
| "learning_rate": 2.2609948781607827e-08, | |
| "loss": 0.4424, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.9751419558359622, | |
| "grad_norm": 0.4712177319951425, | |
| "learning_rate": 1.9386472977370728e-08, | |
| "loss": 0.4412, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.9771608832807571, | |
| "grad_norm": 0.443140708468747, | |
| "learning_rate": 1.6410340765187772e-08, | |
| "loss": 0.4382, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.979179810725552, | |
| "grad_norm": 0.4782858905945508, | |
| "learning_rate": 1.3681699943564053e-08, | |
| "loss": 0.4387, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.981198738170347, | |
| "grad_norm": 0.6206662222549734, | |
| "learning_rate": 1.1200686020266805e-08, | |
| "loss": 0.4431, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.983217665615142, | |
| "grad_norm": 2.8460570799996985, | |
| "learning_rate": 8.967422205598541e-09, | |
| "loss": 0.4393, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.9852365930599369, | |
| "grad_norm": 0.4435087118229933, | |
| "learning_rate": 6.982019406275853e-09, | |
| "loss": 0.436, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.9872555205047319, | |
| "grad_norm": 0.4300508595922509, | |
| "learning_rate": 5.244576219922693e-09, | |
| "loss": 0.4316, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.9892744479495268, | |
| "grad_norm": 0.49712300151356087, | |
| "learning_rate": 3.755178930173742e-09, | |
| "loss": 0.4463, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.9912933753943217, | |
| "grad_norm": 0.4798533947795922, | |
| "learning_rate": 2.513901502389504e-09, | |
| "loss": 0.4327, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.9933123028391168, | |
| "grad_norm": 0.4936501161499046, | |
| "learning_rate": 1.520805579983131e-09, | |
| "loss": 0.4403, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.9953312302839117, | |
| "grad_norm": 0.4638813307919199, | |
| "learning_rate": 7.759404813600935e-10, | |
| "loss": 0.4385, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.9973501577287066, | |
| "grad_norm": 0.43789683220104403, | |
| "learning_rate": 2.7934319746625306e-10, | |
| "loss": 0.4387, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.9993690851735015, | |
| "grad_norm": 0.4980004114526748, | |
| "learning_rate": 3.103838995321962e-11, | |
| "loss": 0.4391, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 4954, | |
| "total_flos": 1.1092292323919266e+19, | |
| "train_loss": 0.46906891030845593, | |
| "train_runtime": 81648.0087, | |
| "train_samples_per_second": 7.765, | |
| "train_steps_per_second": 0.061 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 4954, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 50000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.1092292323919266e+19, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
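
The state above follows the standard Hugging Face `Trainer` layout: `log_history` holds one entry per logging step (here every 10 steps, per `logging_steps`), and its final entry is a run summary without a `loss` key. Below is a minimal sketch for inspecting such a dump, assuming it is saved under the conventional name `trainer_state.json` (the file name and the use of matplotlib are assumptions; only the JSON structure shown above is taken from the source).

```python
# Minimal sketch: load a Trainer state dump and plot loss / learning rate
# against step. Assumes the JSON above is saved as "trainer_state.json".
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step log entries; the trailing summary entry has no "loss" key,
# so filter it out before plotting.
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

# The summary block implies an effective batch size of
# train_samples_per_second / train_steps_per_second
# (roughly 7.765 / 0.061, i.e. about 128 for this run).
summary = state["log_history"][-1]
eff_batch = summary["train_samples_per_second"] / summary["train_steps_per_second"]
print(f"effective batch size ~ {eff_batch:.0f}")

fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
ax1.plot(steps, losses)
ax1.set_ylabel("loss")
ax2.plot(steps, lrs)
ax2.set_ylabel("learning rate")
ax2.set_xlabel("step")
plt.tight_layout()
plt.show()
```

For this run the plot would show the cosine-style decay of the learning rate toward zero over the single epoch (`max_steps` = 4954) alongside the loss settling in the 0.43-0.45 range recorded in the later log entries.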