{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.218982703597179,
  "eval_steps": 500,
  "global_step": 112640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.9917917602406e-05,
      "loss": 9.4912,
      "step": 250
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9835835204812e-05,
      "loss": 9.0253,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9753752807218e-05,
      "loss": 8.889,
      "step": 750
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9671670409624e-05,
      "loss": 8.7297,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.958958801203e-05,
      "loss": 8.6079,
      "step": 1250
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9507505614436e-05,
      "loss": 8.525,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9425423216842e-05,
      "loss": 8.4223,
      "step": 1750
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9343340819248e-05,
      "loss": 8.3692,
      "step": 2000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.926125842165399e-05,
      "loss": 8.332,
      "step": 2250
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.917917602406e-05,
      "loss": 8.2672,
      "step": 2500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9097093626466e-05,
      "loss": 8.1887,
      "step": 2750
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9015011228871996e-05,
      "loss": 8.1799,
      "step": 3000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.893292883127799e-05,
      "loss": 8.1428,
      "step": 3250
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8850846433683995e-05,
      "loss": 8.1542,
      "step": 3500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8768764036089995e-05,
      "loss": 8.0947,
      "step": 3750
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.868668163849599e-05,
      "loss": 8.0386,
      "step": 4000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.860459924090199e-05,
      "loss": 7.9978,
      "step": 4250
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.852251684330799e-05,
      "loss": 7.9489,
      "step": 4500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8440434445713986e-05,
      "loss": 7.9363,
      "step": 4750
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8358352048119985e-05,
      "loss": 7.9265,
      "step": 5000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8276269650525985e-05,
      "loss": 7.9027,
      "step": 5250
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.819418725293199e-05,
      "loss": 7.8451,
      "step": 5500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8112104855337984e-05,
      "loss": 7.7337,
      "step": 5750
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8030022457743983e-05,
      "loss": 7.7837,
      "step": 6000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.794794006014998e-05,
      "loss": 7.7527,
      "step": 6250
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.786585766255598e-05,
      "loss": 7.7434,
      "step": 6500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.778377526496198e-05,
      "loss": 7.6981,
      "step": 6750
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.770169286736798e-05,
      "loss": 7.6708,
      "step": 7000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.761961046977398e-05,
      "loss": 7.5951,
      "step": 7250
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.753752807217998e-05,
      "loss": 7.5975,
      "step": 7500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.745544567458598e-05,
      "loss": 7.5186,
      "step": 7750
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.737336327699198e-05,
      "loss": 7.4844,
      "step": 8000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.729128087939798e-05,
      "loss": 7.4021,
      "step": 8250
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.720919848180398e-05,
      "loss": 7.4089,
      "step": 8500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.712711608420998e-05,
      "loss": 7.3132,
      "step": 8750
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.704503368661598e-05,
      "loss": 7.3552,
      "step": 9000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.696295128902197e-05,
      "loss": 7.3327,
      "step": 9250
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6880868891427976e-05,
      "loss": 7.2294,
      "step": 9500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6798786493833976e-05,
      "loss": 7.2296,
      "step": 9750
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.671670409623997e-05,
      "loss": 7.1595,
      "step": 10000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.663462169864597e-05,
      "loss": 7.1434,
      "step": 10250
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6552539301051974e-05,
      "loss": 7.1707,
      "step": 10500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6470456903457974e-05,
      "loss": 7.0756,
      "step": 10750
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6388374505863967e-05,
      "loss": 7.0205,
      "step": 11000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6306292108269966e-05,
      "loss": 7.0475,
      "step": 11250
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.622420971067597e-05,
      "loss": 7.0355,
      "step": 11500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6142127313081965e-05,
      "loss": 6.9695,
      "step": 11750
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.6060044915487965e-05,
      "loss": 6.962,
      "step": 12000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5977962517893964e-05,
      "loss": 6.9281,
      "step": 12250
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5895880120299964e-05,
      "loss": 6.9421,
      "step": 12500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.581379772270596e-05,
      "loss": 6.9498,
      "step": 12750
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.573171532511196e-05,
      "loss": 6.8833,
      "step": 13000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.564963292751796e-05,
      "loss": 6.8878,
      "step": 13250
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.556755052992396e-05,
      "loss": 6.845,
      "step": 13500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.548546813232996e-05,
      "loss": 6.833,
      "step": 13750
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.540338573473596e-05,
      "loss": 6.8062,
      "step": 14000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.532130333714196e-05,
      "loss": 6.8126,
      "step": 14250
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.523922093954796e-05,
      "loss": 6.7655,
      "step": 14500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.515713854195396e-05,
      "loss": 6.7613,
      "step": 14750
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.507505614435996e-05,
      "loss": 6.6869,
      "step": 15000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.499297374676595e-05,
      "loss": 6.6834,
      "step": 15250
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.491089134917196e-05,
      "loss": 6.7371,
      "step": 15500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.482880895157796e-05,
      "loss": 6.6962,
      "step": 15750
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4746726553983956e-05,
      "loss": 6.6554,
      "step": 16000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.466464415638995e-05,
      "loss": 6.6898,
      "step": 16250
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4582561758795955e-05,
      "loss": 6.6349,
      "step": 16500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4500479361201955e-05,
      "loss": 6.6075,
      "step": 16750
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.441839696360795e-05,
      "loss": 6.6128,
      "step": 17000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.433631456601395e-05,
      "loss": 6.5883,
      "step": 17250
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4254232168419953e-05,
      "loss": 6.5475,
      "step": 17500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4172149770825946e-05,
      "loss": 6.4996,
      "step": 17750
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4090067373231946e-05,
      "loss": 6.5243,
      "step": 18000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4007984975637945e-05,
      "loss": 6.5113,
      "step": 18250
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.392590257804395e-05,
      "loss": 6.4859,
      "step": 18500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3843820180449944e-05,
      "loss": 6.473,
      "step": 18750
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3761737782855944e-05,
      "loss": 6.5033,
      "step": 19000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.367965538526194e-05,
      "loss": 6.4411,
      "step": 19250
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.359757298766794e-05,
      "loss": 6.4374,
      "step": 19500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.351549059007394e-05,
      "loss": 6.4449,
      "step": 19750
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.343340819247994e-05,
      "loss": 6.4043,
      "step": 20000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.335132579488594e-05,
      "loss": 6.3784,
      "step": 20250
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.326924339729194e-05,
      "loss": 6.3888,
      "step": 20500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.318716099969794e-05,
      "loss": 6.3818,
      "step": 20750
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.310507860210394e-05,
      "loss": 6.3793,
      "step": 21000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.302299620450993e-05,
      "loss": 6.3635,
      "step": 21250
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.294091380691594e-05,
      "loss": 6.3109,
      "step": 21500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.285883140932194e-05,
      "loss": 6.3004,
      "step": 21750
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.277674901172794e-05,
      "loss": 6.3036,
      "step": 22000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.269466661413393e-05,
      "loss": 6.233,
      "step": 22250
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2612584216539936e-05,
      "loss": 6.242,
      "step": 22500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2530501818945936e-05,
      "loss": 6.2686,
      "step": 22750
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.244841942135193e-05,
      "loss": 6.2183,
      "step": 23000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.236633702375793e-05,
      "loss": 6.2428,
      "step": 23250
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2284254626163934e-05,
      "loss": 6.1933,
      "step": 23500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2202172228569934e-05,
      "loss": 6.1957,
      "step": 23750
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.212008983097593e-05,
      "loss": 6.2449,
      "step": 24000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.2038007433381926e-05,
      "loss": 6.186,
      "step": 24250
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.195592503578793e-05,
      "loss": 6.1722,
      "step": 24500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1873842638193925e-05,
      "loss": 6.193,
      "step": 24750
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1791760240599925e-05,
      "loss": 6.1829,
      "step": 25000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1709677843005924e-05,
      "loss": 6.1587,
      "step": 25250
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1627595445411924e-05,
      "loss": 6.1058,
      "step": 25500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.154551304781792e-05,
      "loss": 6.1346,
      "step": 25750
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.146343065022392e-05,
      "loss": 6.0702,
      "step": 26000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.138134825262992e-05,
      "loss": 6.0994,
      "step": 26250
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.129926585503592e-05,
      "loss": 6.0871,
      "step": 26500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.121718345744192e-05,
      "loss": 6.0275,
      "step": 26750
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.113510105984792e-05,
      "loss": 6.0883,
      "step": 27000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.105301866225392e-05,
      "loss": 6.0602,
      "step": 27250
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.097093626465992e-05,
      "loss": 5.9898,
      "step": 27500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.088885386706592e-05,
      "loss": 6.0722,
      "step": 27750
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.080677146947192e-05,
      "loss": 5.987,
      "step": 28000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.072468907187791e-05,
      "loss": 6.0631,
      "step": 28250
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.064260667428392e-05,
      "loss": 5.9497,
      "step": 28500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.056052427668992e-05,
      "loss": 5.9776,
      "step": 28750
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.047844187909591e-05,
      "loss": 5.9793,
      "step": 29000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.039635948150191e-05,
      "loss": 6.004,
      "step": 29250
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0314277083907916e-05,
      "loss": 5.9236,
      "step": 29500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0232194686313915e-05,
      "loss": 5.9507,
      "step": 29750
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.015011228871991e-05,
      "loss": 5.9362,
      "step": 30000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.006802989112591e-05,
      "loss": 5.9727,
      "step": 30250
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9985947493531914e-05,
      "loss": 5.8949,
      "step": 30500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9903865095937906e-05,
      "loss": 5.9043,
      "step": 30750
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9821782698343906e-05,
      "loss": 5.8899,
      "step": 31000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9739700300749905e-05,
      "loss": 5.8585,
      "step": 31250
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9657617903155905e-05,
      "loss": 5.8793,
      "step": 31500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9575535505561904e-05,
      "loss": 5.8836,
      "step": 31750
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9493453107967904e-05,
      "loss": 5.8597,
      "step": 32000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.94113707103739e-05,
      "loss": 5.8795,
      "step": 32250
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.93292883127799e-05,
      "loss": 5.8787,
      "step": 32500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.92472059151859e-05,
      "loss": 5.8603,
      "step": 32750
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.91651235175919e-05,
      "loss": 5.8186,
      "step": 33000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.90830411199979e-05,
      "loss": 5.7699,
      "step": 33250
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.90009587224039e-05,
      "loss": 5.7936,
      "step": 33500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.89188763248099e-05,
      "loss": 5.7771,
      "step": 33750
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.88367939272159e-05,
      "loss": 5.7897,
      "step": 34000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.875471152962189e-05,
      "loss": 5.7541,
      "step": 34250
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.86726291320279e-05,
      "loss": 5.7193,
      "step": 34500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.85905467344339e-05,
      "loss": 5.7607,
      "step": 34750
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.85084643368399e-05,
      "loss": 5.743,
      "step": 35000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.842638193924589e-05,
      "loss": 5.7274,
      "step": 35250
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8344299541651897e-05,
      "loss": 5.7183,
      "step": 35500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8262217144057896e-05,
      "loss": 5.6701,
      "step": 35750
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.818013474646389e-05,
      "loss": 5.7014,
      "step": 36000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.809805234886989e-05,
      "loss": 5.684,
      "step": 36250
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8015969951275895e-05,
      "loss": 5.697,
      "step": 36500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.793388755368189e-05,
      "loss": 5.6821,
      "step": 36750
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.785180515608789e-05,
      "loss": 5.739,
      "step": 37000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.7769722758493886e-05,
      "loss": 5.6367,
      "step": 37250
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.768764036089989e-05,
      "loss": 5.625,
      "step": 37500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7605557963305885e-05,
      "loss": 5.6101,
      "step": 37750
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7523475565711885e-05,
      "loss": 5.6124,
      "step": 38000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7441393168117884e-05,
      "loss": 5.6286,
      "step": 38250
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7359310770523884e-05,
      "loss": 5.6069,
      "step": 38500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.727722837292988e-05,
      "loss": 5.6131,
      "step": 38750
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.719514597533588e-05,
      "loss": 5.588,
      "step": 39000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.711306357774188e-05,
      "loss": 5.5794,
      "step": 39250
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.703098118014788e-05,
      "loss": 5.6147,
      "step": 39500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.694889878255388e-05,
      "loss": 5.5989,
      "step": 39750
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.686681638495988e-05,
      "loss": 5.5775,
      "step": 40000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.678473398736588e-05,
      "loss": 5.554,
      "step": 40250
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.670265158977188e-05,
      "loss": 5.6252,
      "step": 40500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.662056919217788e-05,
      "loss": 5.575,
      "step": 40750
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.653848679458388e-05,
      "loss": 5.5285,
      "step": 41000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.645640439698987e-05,
      "loss": 5.5481,
      "step": 41250
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.637432199939588e-05,
      "loss": 5.4834,
      "step": 41500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.629223960180188e-05,
      "loss": 5.5461,
      "step": 41750
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.621015720420787e-05,
      "loss": 5.5474,
      "step": 42000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.612807480661387e-05,
      "loss": 5.53,
      "step": 42250
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.6045992409019876e-05,
      "loss": 5.4902,
      "step": 42500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.5963910011425875e-05,
      "loss": 5.4589,
      "step": 42750
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.588182761383187e-05,
      "loss": 5.4909,
      "step": 43000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.579974521623787e-05,
      "loss": 5.4721,
      "step": 43250
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5717662818643874e-05,
      "loss": 5.5191,
      "step": 43500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5635580421049866e-05,
      "loss": 5.4646,
      "step": 43750
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5553498023455866e-05,
      "loss": 5.4786,
      "step": 44000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5471415625861865e-05,
      "loss": 5.4943,
      "step": 44250
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5389333228267865e-05,
      "loss": 5.443,
      "step": 44500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5307250830673864e-05,
      "loss": 5.4313,
      "step": 44750
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5225168433079864e-05,
      "loss": 5.4594,
      "step": 45000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.514308603548586e-05,
      "loss": 5.4203,
      "step": 45250
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.506100363789186e-05,
      "loss": 5.3763,
      "step": 45500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.497892124029786e-05,
      "loss": 5.445,
      "step": 45750
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.489683884270386e-05,
      "loss": 5.4139,
      "step": 46000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.481475644510986e-05,
      "loss": 5.3907,
      "step": 46250
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.473267404751586e-05,
      "loss": 5.4021,
      "step": 46500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.465059164992186e-05,
      "loss": 5.3955,
      "step": 46750
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.456850925232786e-05,
      "loss": 5.3653,
      "step": 47000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.448642685473385e-05,
      "loss": 5.3789,
      "step": 47250
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.440434445713986e-05,
      "loss": 5.3561,
      "step": 47500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.432226205954586e-05,
      "loss": 5.3199,
      "step": 47750
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.424017966195186e-05,
      "loss": 5.3322,
      "step": 48000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.415809726435785e-05,
      "loss": 5.3303,
      "step": 48250
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.407601486676386e-05,
      "loss": 5.2998,
      "step": 48500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3993932469169856e-05,
      "loss": 5.3534,
      "step": 48750
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.391185007157585e-05,
      "loss": 5.3316,
      "step": 49000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.382976767398185e-05,
      "loss": 5.3047,
      "step": 49250
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3747685276387855e-05,
      "loss": 5.2876,
      "step": 49500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.366560287879385e-05,
      "loss": 5.3265,
      "step": 49750
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.358352048119985e-05,
      "loss": 5.2812,
      "step": 50000
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3501438083605846e-05,
      "loss": 5.2702,
      "step": 50250
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.341935568601185e-05,
      "loss": 5.2525,
      "step": 50500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3337273288417845e-05,
      "loss": 5.293,
      "step": 50750
    },
    {
      "epoch": 1.0,
      "eval_loss": 5.185632228851318,
      "eval_runtime": 624.5274,
      "eval_samples_per_second": 289.038,
      "eval_steps_per_second": 9.032,
      "step": 50762
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3255190890823845e-05,
      "loss": 5.252,
      "step": 51000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3173108493229844e-05,
      "loss": 5.2646,
      "step": 51250
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3091026095635844e-05,
      "loss": 5.2291,
      "step": 51500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.300894369804184e-05,
      "loss": 5.2391,
      "step": 51750
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.292686130044784e-05,
      "loss": 5.1969,
      "step": 52000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.284477890285384e-05,
      "loss": 5.2636,
      "step": 52250
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.276269650525984e-05,
      "loss": 5.23,
      "step": 52500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.268061410766584e-05,
      "loss": 5.2188,
      "step": 52750
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.259853171007184e-05,
      "loss": 5.2196,
      "step": 53000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2516449312477834e-05,
      "loss": 5.1822,
      "step": 53250
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.243436691488384e-05,
      "loss": 5.1899,
      "step": 53500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.235228451728984e-05,
      "loss": 5.1678,
      "step": 53750
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.227020211969584e-05,
      "loss": 5.1901,
      "step": 54000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.218811972210183e-05,
      "loss": 5.2002,
      "step": 54250
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.210603732450784e-05,
      "loss": 5.1926,
      "step": 54500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.202395492691384e-05,
      "loss": 5.1494,
      "step": 54750
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.194187252931983e-05,
      "loss": 5.1964,
      "step": 55000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.185979013172583e-05,
      "loss": 5.1541,
      "step": 55250
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1777707734131836e-05,
      "loss": 5.1612,
      "step": 55500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1695625336537835e-05,
      "loss": 5.1887,
      "step": 55750
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.161354293894383e-05,
      "loss": 5.16,
      "step": 56000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.153146054134983e-05,
      "loss": 5.1533,
      "step": 56250
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.1449378143755834e-05,
      "loss": 5.1851,
      "step": 56500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1367295746161826e-05,
      "loss": 5.1552,
      "step": 56750
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1285213348567826e-05,
      "loss": 5.1378,
      "step": 57000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1203130950973825e-05,
      "loss": 5.118,
      "step": 57250
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1121048553379825e-05,
      "loss": 5.1494,
      "step": 57500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.1038966155785824e-05,
      "loss": 5.0843,
      "step": 57750
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.0956883758191824e-05,
      "loss": 5.097,
      "step": 58000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0874801360597823e-05,
      "loss": 5.103,
      "step": 58250
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.079271896300382e-05,
      "loss": 5.0778,
      "step": 58500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.071063656540982e-05,
      "loss": 5.0565,
      "step": 58750
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.062855416781582e-05,
      "loss": 5.1443,
      "step": 59000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.054647177022182e-05,
      "loss": 5.0715,
      "step": 59250
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.046438937262782e-05,
      "loss": 5.0871,
      "step": 59500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.038230697503382e-05,
      "loss": 5.0992,
      "step": 59750
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0300224577439816e-05,
      "loss": 5.0874,
      "step": 60000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0218142179845816e-05,
      "loss": 5.1127,
      "step": 60250
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.013605978225182e-05,
      "loss": 5.0684,
      "step": 60500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.0053977384657815e-05,
      "loss": 5.0856,
      "step": 60750
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9971894987063814e-05,
      "loss": 5.062,
      "step": 61000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.988981258946981e-05,
      "loss": 5.0753,
      "step": 61250
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9807730191875817e-05,
      "loss": 5.055,
      "step": 61500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9725647794281813e-05,
      "loss": 5.0444,
      "step": 61750
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9643565396687812e-05,
      "loss": 5.0642,
      "step": 62000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.956148299909381e-05,
      "loss": 5.0566,
      "step": 62250
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.947940060149981e-05,
      "loss": 5.0445,
      "step": 62500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.939731820390581e-05,
      "loss": 5.0221,
      "step": 62750
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9315235806311807e-05,
      "loss": 5.0624,
      "step": 63000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9233153408717807e-05,
      "loss": 5.0271,
      "step": 63250
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.915107101112381e-05,
      "loss": 5.0478,
      "step": 63500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.906898861352981e-05,
      "loss": 5.0316,
      "step": 63750
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8986906215935805e-05,
      "loss": 4.9997,
      "step": 64000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8904823818341804e-05,
      "loss": 4.9997,
      "step": 64250
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8822741420747807e-05,
      "loss": 5.0058,
      "step": 64500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8740659023153803e-05,
      "loss": 4.9975,
      "step": 64750
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8658576625559803e-05,
      "loss": 4.9933,
      "step": 65000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.85764942279658e-05,
      "loss": 4.9868,
      "step": 65250
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8494411830371802e-05,
      "loss": 4.981,
      "step": 65500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.84123294327778e-05,
      "loss": 5.0185,
      "step": 65750
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8330247035183798e-05,
      "loss": 4.9715,
      "step": 66000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8248164637589797e-05,
      "loss": 5.0356,
      "step": 66250
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.81660822399958e-05,
      "loss": 4.9427,
      "step": 66500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.80839998424018e-05,
      "loss": 4.8855,
      "step": 66750
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8001917444807796e-05,
      "loss": 4.9676,
      "step": 67000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.7919835047213795e-05,
      "loss": 4.9374,
      "step": 67250
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7837752649619798e-05,
      "loss": 4.9648,
      "step": 67500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7755670252025794e-05,
      "loss": 4.9769,
      "step": 67750
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7673587854431793e-05,
      "loss": 4.8962,
      "step": 68000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.759150545683779e-05,
      "loss": 4.9559,
      "step": 68250
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7509423059243792e-05,
      "loss": 4.929,
      "step": 68500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7427340661649792e-05,
      "loss": 4.946,
      "step": 68750
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7345258264055788e-05,
      "loss": 4.9562,
      "step": 69000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7263175866461788e-05,
      "loss": 4.9309,
      "step": 69250
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.718109346886779e-05,
      "loss": 4.9074,
      "step": 69500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.709901107127379e-05,
      "loss": 4.8918,
      "step": 69750
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.7016928673679786e-05,
      "loss": 4.9262,
      "step": 70000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.6934846276085786e-05,
      "loss": 4.8864,
      "step": 70250
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.685276387849179e-05,
      "loss": 4.9013,
      "step": 70500
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6770681480897785e-05,
      "loss": 4.8744,
      "step": 70750
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6688599083303784e-05,
      "loss": 4.881,
      "step": 71000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6606516685709787e-05,
      "loss": 4.911,
      "step": 71250
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6524434288115786e-05,
      "loss": 4.8839,
      "step": 71500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6442351890521783e-05,
      "loss": 4.8996,
      "step": 71750
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6360269492927782e-05,
      "loss": 4.9019,
      "step": 72000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6278187095333785e-05,
      "loss": 4.8942,
      "step": 72250
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.619610469773978e-05,
      "loss": 4.8242,
      "step": 72500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.611402230014578e-05,
      "loss": 4.8776,
      "step": 72750
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.6031939902551777e-05,
      "loss": 4.8732,
      "step": 73000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.594985750495778e-05,
      "loss": 4.8902,
      "step": 73250
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.586777510736378e-05,
      "loss": 4.8638,
      "step": 73500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5785692709769775e-05,
      "loss": 4.8418,
      "step": 73750
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5703610312175775e-05,
      "loss": 4.8541,
      "step": 74000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5621527914581777e-05,
      "loss": 4.8397,
      "step": 74250
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5539445516987777e-05,
      "loss": 4.8609,
      "step": 74500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5457363119393773e-05,
      "loss": 4.8698,
      "step": 74750
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5375280721799773e-05,
      "loss": 4.88,
      "step": 75000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5293198324205775e-05,
      "loss": 4.8273,
      "step": 75250
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.521111592661177e-05,
      "loss": 4.8285,
      "step": 75500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.512903352901777e-05,
      "loss": 4.853,
      "step": 75750
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.5046951131423767e-05,
      "loss": 4.8384,
      "step": 76000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.496486873382977e-05,
      "loss": 4.7944,
      "step": 76250
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.488278633623577e-05,
      "loss": 4.8423,
      "step": 76500
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4800703938641766e-05,
      "loss": 4.797,
      "step": 76750
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.471862154104777e-05,
      "loss": 4.7585,
      "step": 77000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4636539143453765e-05,
      "loss": 4.8479,
      "step": 77250
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4554456745859767e-05,
      "loss": 4.7687,
      "step": 77500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4472374348265764e-05,
      "loss": 4.793,
      "step": 77750
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4390291950671763e-05,
      "loss": 4.8033,
      "step": 78000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4308209553077763e-05,
      "loss": 4.8058,
      "step": 78250
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4226127155483762e-05,
      "loss": 4.8193,
      "step": 78500
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.414404475788976e-05,
      "loss": 4.8197,
      "step": 78750
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.406196236029576e-05,
      "loss": 4.817,
      "step": 79000
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.397987996270176e-05,
      "loss": 4.7666,
      "step": 79250
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.389779756510776e-05,
      "loss": 4.779,
      "step": 79500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.381571516751376e-05,
      "loss": 4.7867,
      "step": 79750
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.373363276991976e-05,
      "loss": 4.711,
      "step": 80000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3651550372325755e-05,
      "loss": 4.764,
      "step": 80250
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3569467974731758e-05,
      "loss": 4.7442,
      "step": 80500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3487385577137754e-05,
      "loss": 4.7251,
      "step": 80750
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3405303179543757e-05,
      "loss": 4.7445,
      "step": 81000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3323220781949753e-05,
      "loss": 4.7737,
      "step": 81250
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3241138384355753e-05,
      "loss": 4.7479,
      "step": 81500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3159055986761752e-05,
      "loss": 4.7482,
      "step": 81750
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.307697358916775e-05,
      "loss": 4.7181,
      "step": 82000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.299489119157375e-05,
      "loss": 4.7784,
      "step": 82250
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.291280879397975e-05,
      "loss": 4.7506,
      "step": 82500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.283072639638575e-05,
      "loss": 4.7269,
      "step": 82750
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.274864399879175e-05,
      "loss": 4.7102,
      "step": 83000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2666561601197746e-05,
      "loss": 4.7706,
      "step": 83250
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.258447920360375e-05,
      "loss": 4.7343,
      "step": 83500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2502396806009745e-05,
      "loss": 4.6911,
      "step": 83750
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2420314408415748e-05,
      "loss": 4.7585,
      "step": 84000
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2338232010821744e-05,
      "loss": 4.6949,
      "step": 84250
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2256149613227743e-05,
      "loss": 4.7423,
      "step": 84500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2174067215633743e-05,
      "loss": 4.7317,
      "step": 84750
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2091984818039742e-05,
      "loss": 4.7393,
      "step": 85000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.200990242044574e-05,
      "loss": 4.7046,
      "step": 85250
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.192782002285174e-05,
      "loss": 4.77,
      "step": 85500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.184573762525774e-05,
      "loss": 4.7457,
      "step": 85750
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.176365522766374e-05,
      "loss": 4.6749,
      "step": 86000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.168157283006974e-05,
      "loss": 4.6972,
      "step": 86250
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.159949043247574e-05,
      "loss": 4.6846,
      "step": 86500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1517408034881735e-05,
      "loss": 4.7132,
      "step": 86750
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1435325637287738e-05,
      "loss": 4.6606,
      "step": 87000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1353243239693734e-05,
      "loss": 4.6989,
      "step": 87250
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1271160842099737e-05,
      "loss": 4.7223,
      "step": 87500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1189078444505733e-05,
      "loss": 4.6806,
      "step": 87750
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1106996046911733e-05,
      "loss": 4.6947,
      "step": 88000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.1024913649317732e-05,
      "loss": 4.712,
      "step": 88250
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.094283125172373e-05,
      "loss": 4.7026,
      "step": 88500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.086074885412973e-05,
      "loss": 4.714,
      "step": 88750
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.077866645653573e-05,
      "loss": 4.6463,
      "step": 89000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.069658405894173e-05,
      "loss": 4.6939,
      "step": 89250
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.061450166134773e-05,
      "loss": 4.6927,
      "step": 89500
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0532419263753726e-05,
      "loss": 4.6733,
      "step": 89750
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.045033686615973e-05,
      "loss": 4.6651,
      "step": 90000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0368254468565725e-05,
      "loss": 4.6826,
      "step": 90250
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0286172070971728e-05,
      "loss": 4.6593,
      "step": 90500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0204089673377724e-05,
      "loss": 4.6748,
      "step": 90750
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0122007275783723e-05,
      "loss": 4.6453,
      "step": 91000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.0039924878189723e-05,
      "loss": 4.6764,
      "step": 91250
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9957842480595722e-05,
      "loss": 4.6282,
      "step": 91500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.987576008300172e-05,
      "loss": 4.6437,
      "step": 91750
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.979367768540772e-05,
      "loss": 4.6456,
      "step": 92000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.971159528781372e-05,
      "loss": 4.6155,
      "step": 92250
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.962951289021972e-05,
      "loss": 4.609,
      "step": 92500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9547430492625716e-05,
      "loss": 4.6056,
      "step": 92750
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.946534809503172e-05,
      "loss": 4.6138,
      "step": 93000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9383265697437715e-05,
      "loss": 4.6259,
      "step": 93250
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9301183299843718e-05,
      "loss": 4.651,
      "step": 93500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9219100902249714e-05,
      "loss": 4.6287,
      "step": 93750
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9137018504655717e-05,
      "loss": 4.6163,
      "step": 94000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9054936107061713e-05,
      "loss": 4.5989,
      "step": 94250
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8972853709467713e-05,
      "loss": 4.6224,
      "step": 94500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8890771311873712e-05,
      "loss": 4.5913,
      "step": 94750
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8808688914279712e-05,
      "loss": 4.6267,
      "step": 95000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.872660651668571e-05,
      "loss": 4.5906,
      "step": 95250
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.864452411909171e-05,
      "loss": 4.6433,
      "step": 95500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.856244172149771e-05,
      "loss": 4.6444,
      "step": 95750
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.848035932390371e-05,
      "loss": 4.6358,
      "step": 96000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8398276926309706e-05,
      "loss": 4.577,
      "step": 96250
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.831619452871571e-05,
      "loss": 4.6281,
      "step": 96500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8234112131121705e-05,
      "loss": 4.613,
      "step": 96750
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8152029733527708e-05,
      "loss": 4.6085,
      "step": 97000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8069947335933704e-05,
      "loss": 4.6347,
      "step": 97250
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.7987864938339703e-05,
      "loss": 4.6094,
      "step": 97500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7905782540745703e-05,
      "loss": 4.6255,
      "step": 97750
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7823700143151702e-05,
      "loss": 4.5426,
      "step": 98000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7741617745557702e-05,
      "loss": 4.594,
      "step": 98250
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.76595353479637e-05,
      "loss": 4.5371,
      "step": 98500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.75774529503697e-05,
      "loss": 4.6085,
      "step": 98750
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.74953705527757e-05,
      "loss": 4.6097,
      "step": 99000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7413288155181696e-05,
      "loss": 4.5315,
      "step": 99250
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.73312057575877e-05,
      "loss": 4.5781,
      "step": 99500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7249123359993695e-05,
      "loss": 4.5609,
      "step": 99750
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7167040962399698e-05,
      "loss": 4.5971,
      "step": 100000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7084958564805694e-05,
      "loss": 4.5534,
      "step": 100250
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7002876167211694e-05,
      "loss": 4.5275,
      "step": 100500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6920793769617693e-05,
      "loss": 4.5443,
      "step": 100750
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6838711372023693e-05,
      "loss": 4.5559,
      "step": 101000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6756628974429692e-05,
      "loss": 4.5289,
      "step": 101250
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6674546576835692e-05,
      "loss": 4.574,
      "step": 101500
    },
    {
      "epoch": 2.0,
      "eval_loss": 4.450372695922852,
      "eval_runtime": 619.9479,
      "eval_samples_per_second": 291.173,
      "eval_steps_per_second": 9.099,
      "step": 101524
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.659246417924169e-05,
      "loss": 4.4787,
      "step": 101750
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.651038178164769e-05,
      "loss": 4.5425,
      "step": 102000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.642829938405369e-05,
      "loss": 4.5304,
      "step": 102250
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.634621698645969e-05,
      "loss": 4.5141,
      "step": 102500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6264134588865686e-05,
      "loss": 4.5189,
      "step": 102750
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.618205219127169e-05,
      "loss": 4.5433,
      "step": 103000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6099969793677685e-05,
      "loss": 4.561,
      "step": 103250
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6017887396083688e-05,
      "loss": 4.5514,
      "step": 103500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5935804998489684e-05,
      "loss": 4.5083,
      "step": 103750
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5853722600895683e-05,
      "loss": 4.524,
      "step": 104000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5771640203301683e-05,
      "loss": 4.5076,
      "step": 104250
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5689557805707682e-05,
      "loss": 4.5146,
      "step": 104500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5607475408113682e-05,
      "loss": 4.4952,
      "step": 104750
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.552539301051968e-05,
      "loss": 4.513,
      "step": 105000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.544331061292568e-05,
      "loss": 4.5001,
      "step": 105250
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.536122821533168e-05,
      "loss": 4.5537,
      "step": 105500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5279145817737676e-05,
      "loss": 4.5184,
      "step": 105750
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.519706342014368e-05,
      "loss": 4.4982,
      "step": 106000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5114981022549677e-05,
      "loss": 4.497,
      "step": 106250
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5032898624955677e-05,
      "loss": 4.4993,
      "step": 106500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4950816227361674e-05,
      "loss": 4.5091,
      "step": 106750
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4868733829767676e-05,
      "loss": 4.5167,
      "step": 107000
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4786651432173673e-05,
      "loss": 4.4561,
      "step": 107250
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4704569034579675e-05,
      "loss": 4.5323,
      "step": 107500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4622486636985672e-05,
      "loss": 4.5174,
      "step": 107750
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4540404239391672e-05,
      "loss": 4.5102,
      "step": 108000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.445832184179767e-05,
      "loss": 4.4722,
      "step": 108250
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.437623944420367e-05,
      "loss": 4.4973,
      "step": 108500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4294157046609669e-05,
      "loss": 4.4885,
      "step": 108750
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.421207464901567e-05,
      "loss": 4.4767,
      "step": 109000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4129992251421668e-05,
      "loss": 4.4758,
      "step": 109250
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4047909853827667e-05,
      "loss": 4.4632,
      "step": 109500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3965827456233665e-05,
      "loss": 4.4848,
      "step": 109750
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3883745058639666e-05,
      "loss": 4.4566,
      "step": 110000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3801662661045664e-05,
      "loss": 4.4524,
      "step": 110250
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3719580263451665e-05,
      "loss": 4.448,
      "step": 110500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3637497865857663e-05,
      "loss": 4.458,
      "step": 110750
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3555415468263662e-05,
      "loss": 4.4448,
      "step": 111000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.347333307066966e-05,
      "loss": 4.4719,
      "step": 111250
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3391250673075661e-05,
      "loss": 4.4535,
      "step": 111500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3309168275481659e-05,
      "loss": 4.4429,
      "step": 111750
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.322708587788766e-05,
      "loss": 4.4854,
      "step": 112000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3145003480293658e-05,
      "loss": 4.409,
      "step": 112250
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3062921082699658e-05,
      "loss": 4.4335,
      "step": 112500
    }
  ],
  "logging_steps": 250,
  "max_steps": 152286,
  "num_train_epochs": 3,
  "save_steps": 2048,
  "total_flos": 4.66845448223232e+17,
  "trial_name": null,
  "trial_params": null
}