| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.0, |
| "eval_steps": 500, |
| "global_step": 1548, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.010342598577892695, |
| "grad_norm": 0.27074137330055237, |
| "learning_rate": 1.5384615384615385e-07, |
| "loss": 0.5239, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.02068519715578539, |
| "grad_norm": 0.2690924108028412, |
| "learning_rate": 3.5897435897435896e-07, |
| "loss": 0.519, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.031027795733678087, |
| "grad_norm": 0.28973960876464844, |
| "learning_rate": 5.641025641025641e-07, |
| "loss": 0.5216, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.04137039431157078, |
| "grad_norm": 0.2677304744720459, |
| "learning_rate": 7.692307692307693e-07, |
| "loss": 0.5403, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.051712992889463474, |
| "grad_norm": 0.2680451571941376, |
| "learning_rate": 9.743589743589742e-07, |
| "loss": 0.518, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.062055591467356175, |
| "grad_norm": 0.25933119654655457, |
| "learning_rate": 1.1794871794871795e-06, |
| "loss": 0.5192, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.07239819004524888, |
| "grad_norm": 0.25746598839759827, |
| "learning_rate": 1.3846153846153844e-06, |
| "loss": 0.5164, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.08274078862314156, |
| "grad_norm": 0.2424672544002533, |
| "learning_rate": 1.5897435897435895e-06, |
| "loss": 0.5016, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.09308338720103426, |
| "grad_norm": 0.22520849108695984, |
| "learning_rate": 1.7948717948717948e-06, |
| "loss": 0.5133, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.10342598577892695, |
| "grad_norm": 0.208700492978096, |
| "learning_rate": 2e-06, |
| "loss": 0.503, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.11376858435681965, |
| "grad_norm": 0.2092197686433792, |
| "learning_rate": 2.2051282051282052e-06, |
| "loss": 0.5016, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.12411118293471235, |
| "grad_norm": 0.20155887305736542, |
| "learning_rate": 2.41025641025641e-06, |
| "loss": 0.4955, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.13445378151260504, |
| "grad_norm": 0.20360776782035828, |
| "learning_rate": 2.6153846153846154e-06, |
| "loss": 0.51, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.14479638009049775, |
| "grad_norm": 0.20055116713047028, |
| "learning_rate": 2.8205128205128207e-06, |
| "loss": 0.5073, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.15513897866839044, |
| "grad_norm": 0.2022508680820465, |
| "learning_rate": 3.0256410256410256e-06, |
| "loss": 0.4918, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.16548157724628312, |
| "grad_norm": 0.1955750733613968, |
| "learning_rate": 3.230769230769231e-06, |
| "loss": 0.4917, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.17582417582417584, |
| "grad_norm": 0.18670685589313507, |
| "learning_rate": 3.4358974358974353e-06, |
| "loss": 0.4936, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.18616677440206852, |
| "grad_norm": 0.192545086145401, |
| "learning_rate": 3.6410256410256406e-06, |
| "loss": 0.4972, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.1965093729799612, |
| "grad_norm": 0.1836402714252472, |
| "learning_rate": 3.846153846153846e-06, |
| "loss": 0.4823, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.2068519715578539, |
| "grad_norm": 0.18833868205547333, |
| "learning_rate": 3.993881715007257e-06, |
| "loss": 0.4825, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.2171945701357466, |
| "grad_norm": 0.18157020211219788, |
| "learning_rate": 3.969491760330399e-06, |
| "loss": 0.4829, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.2275371687136393, |
| "grad_norm": 0.17653386294841766, |
| "learning_rate": 3.945234630128603e-06, |
| "loss": 0.4886, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.23787976729153198, |
| "grad_norm": 0.18460187315940857, |
| "learning_rate": 3.9211099617411865e-06, |
| "loss": 0.491, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.2482223658694247, |
| "grad_norm": 0.17642319202423096, |
| "learning_rate": 3.897117392507468e-06, |
| "loss": 0.4857, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.2585649644473174, |
| "grad_norm": 0.17706429958343506, |
| "learning_rate": 3.873256559766764e-06, |
| "loss": 0.494, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.2689075630252101, |
| "grad_norm": 0.1816156804561615, |
| "learning_rate": 3.8495271008583914e-06, |
| "loss": 0.4911, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.2792501616031028, |
| "grad_norm": 0.19248028099536896, |
| "learning_rate": 3.825928653121668e-06, |
| "loss": 0.4866, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.2895927601809955, |
| "grad_norm": 0.17712807655334473, |
| "learning_rate": 3.80246085389591e-06, |
| "loss": 0.4763, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.29993535875888816, |
| "grad_norm": 0.18759045004844666, |
| "learning_rate": 3.779123340520437e-06, |
| "loss": 0.4812, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.3102779573367809, |
| "grad_norm": 0.19693388044834137, |
| "learning_rate": 3.755915750334564e-06, |
| "loss": 0.4783, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.3206205559146736, |
| "grad_norm": 0.1827310472726822, |
| "learning_rate": 3.7328377206776083e-06, |
| "loss": 0.477, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.33096315449256625, |
| "grad_norm": 0.18313142657279968, |
| "learning_rate": 3.7098888888888884e-06, |
| "loss": 0.496, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.34130575307045896, |
| "grad_norm": 0.17326150834560394, |
| "learning_rate": 3.687068892307722e-06, |
| "loss": 0.4778, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.3516483516483517, |
| "grad_norm": 0.17738491296768188, |
| "learning_rate": 3.664377368273423e-06, |
| "loss": 0.4813, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.36199095022624433, |
| "grad_norm": 0.18592627346515656, |
| "learning_rate": 3.6418139541253126e-06, |
| "loss": 0.4783, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.37233354880413705, |
| "grad_norm": 0.17538052797317505, |
| "learning_rate": 3.6193782872027053e-06, |
| "loss": 0.4726, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.3826761473820297, |
| "grad_norm": 0.1824338138103485, |
| "learning_rate": 3.5970700048449203e-06, |
| "loss": 0.4637, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.3930187459599224, |
| "grad_norm": 0.1739794909954071, |
| "learning_rate": 3.5748887443912734e-06, |
| "loss": 0.4832, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.40336134453781514, |
| "grad_norm": 0.19277994334697723, |
| "learning_rate": 3.552834143181082e-06, |
| "loss": 0.4773, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.4137039431157078, |
| "grad_norm": 0.1671997308731079, |
| "learning_rate": 3.5309058385536637e-06, |
| "loss": 0.4739, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.4240465416936005, |
| "grad_norm": 0.17366255819797516, |
| "learning_rate": 3.5091034678483365e-06, |
| "loss": 0.4872, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.4343891402714932, |
| "grad_norm": 0.17070995271205902, |
| "learning_rate": 3.4874266684044155e-06, |
| "loss": 0.4593, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.4447317388493859, |
| "grad_norm": 0.1726539433002472, |
| "learning_rate": 3.4658750775612206e-06, |
| "loss": 0.4682, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.4550743374272786, |
| "grad_norm": 0.17803026735782623, |
| "learning_rate": 3.4444483326580667e-06, |
| "loss": 0.4623, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.4654169360051713, |
| "grad_norm": 0.18144938349723816, |
| "learning_rate": 3.423146071034272e-06, |
| "loss": 0.4694, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.47575953458306397, |
| "grad_norm": 0.18337614834308624, |
| "learning_rate": 3.401967930029155e-06, |
| "loss": 0.4697, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.4861021331609567, |
| "grad_norm": 0.18037910759449005, |
| "learning_rate": 3.3809135469820304e-06, |
| "loss": 0.4664, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.4964447317388494, |
| "grad_norm": 0.1738828718662262, |
| "learning_rate": 3.3599825592322174e-06, |
| "loss": 0.4697, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.5067873303167421, |
| "grad_norm": 0.17767304182052612, |
| "learning_rate": 3.3391746041190326e-06, |
| "loss": 0.4866, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.5171299288946348, |
| "grad_norm": 0.17905977368354797, |
| "learning_rate": 3.3184893189817926e-06, |
| "loss": 0.4755, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.5274725274725275, |
| "grad_norm": 0.18217241764068604, |
| "learning_rate": 3.2979263411598156e-06, |
| "loss": 0.4801, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.5378151260504201, |
| "grad_norm": 0.17312566936016083, |
| "learning_rate": 3.2774853079924182e-06, |
| "loss": 0.4772, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.5481577246283129, |
| "grad_norm": 0.17987386882305145, |
| "learning_rate": 3.257165856818918e-06, |
| "loss": 0.4731, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.5585003232062056, |
| "grad_norm": 0.17333056032657623, |
| "learning_rate": 3.236967624978633e-06, |
| "loss": 0.4725, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.5688429217840982, |
| "grad_norm": 0.18219755589962006, |
| "learning_rate": 3.2168902498108775e-06, |
| "loss": 0.4758, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.579185520361991, |
| "grad_norm": 0.17562930285930634, |
| "learning_rate": 3.196933368654973e-06, |
| "loss": 0.4643, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.5895281189398837, |
| "grad_norm": 0.21644935011863708, |
| "learning_rate": 3.1770966188502337e-06, |
| "loss": 0.4794, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.5998707175177763, |
| "grad_norm": 0.18018363416194916, |
| "learning_rate": 3.157379637735977e-06, |
| "loss": 0.4668, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.6102133160956691, |
| "grad_norm": 0.1677103340625763, |
| "learning_rate": 3.137782062651522e-06, |
| "loss": 0.4564, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.6205559146735617, |
| "grad_norm": 0.17912864685058594, |
| "learning_rate": 3.1183035309361833e-06, |
| "loss": 0.4603, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.6308985132514544, |
| "grad_norm": 0.18761886656284332, |
| "learning_rate": 3.0989436799292814e-06, |
| "loss": 0.4799, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.6412411118293472, |
| "grad_norm": 0.17829325795173645, |
| "learning_rate": 3.079702146970131e-06, |
| "loss": 0.4718, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.6515837104072398, |
| "grad_norm": 0.19033263623714447, |
| "learning_rate": 3.060578569398049e-06, |
| "loss": 0.4751, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.6619263089851325, |
| "grad_norm": 0.1795780062675476, |
| "learning_rate": 3.0415725845523547e-06, |
| "loss": 0.4604, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.6722689075630253, |
| "grad_norm": 0.18677003681659698, |
| "learning_rate": 3.022683829772365e-06, |
| "loss": 0.4677, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.6826115061409179, |
| "grad_norm": 0.1794733852148056, |
| "learning_rate": 3.0039119423973945e-06, |
| "loss": 0.4671, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.6929541047188106, |
| "grad_norm": 0.1785053014755249, |
| "learning_rate": 2.9852565597667635e-06, |
| "loss": 0.4761, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.7032967032967034, |
| "grad_norm": 0.17754320800304413, |
| "learning_rate": 2.9667173192197887e-06, |
| "loss": 0.4707, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.713639301874596, |
| "grad_norm": 0.17548543214797974, |
| "learning_rate": 2.9482938580957856e-06, |
| "loss": 0.4534, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.7239819004524887, |
| "grad_norm": 0.17609195411205292, |
| "learning_rate": 2.9299858137340735e-06, |
| "loss": 0.4591, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.7343244990303813, |
| "grad_norm": 0.17191891372203827, |
| "learning_rate": 2.911792823473968e-06, |
| "loss": 0.4697, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.7446670976082741, |
| "grad_norm": 0.18207724392414093, |
| "learning_rate": 2.8937145246547875e-06, |
| "loss": 0.4654, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.7550096961861668, |
| "grad_norm": 0.17942476272583008, |
| "learning_rate": 2.8757505546158493e-06, |
| "loss": 0.4688, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.7653522947640594, |
| "grad_norm": 0.18172645568847656, |
| "learning_rate": 2.857900550696469e-06, |
| "loss": 0.4616, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.7756948933419522, |
| "grad_norm": 0.18675239384174347, |
| "learning_rate": 2.8401641502359657e-06, |
| "loss": 0.4612, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.7860374919198448, |
| "grad_norm": 0.18713857233524323, |
| "learning_rate": 2.822540990573655e-06, |
| "loss": 0.468, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.7963800904977375, |
| "grad_norm": 0.192363440990448, |
| "learning_rate": 2.805030709048856e-06, |
| "loss": 0.472, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.8067226890756303, |
| "grad_norm": 0.17535482347011566, |
| "learning_rate": 2.787632943000885e-06, |
| "loss": 0.4651, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.8170652876535229, |
| "grad_norm": 0.1680748015642166, |
| "learning_rate": 2.7703473297690585e-06, |
| "loss": 0.476, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.8274078862314156, |
| "grad_norm": 0.1780477613210678, |
| "learning_rate": 2.753173506692695e-06, |
| "loss": 0.4715, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.8377504848093084, |
| "grad_norm": 0.17565058171749115, |
| "learning_rate": 2.7361111111111114e-06, |
| "loss": 0.4724, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.848093083387201, |
| "grad_norm": 0.17358048260211945, |
| "learning_rate": 2.719159780363624e-06, |
| "loss": 0.4819, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.8584356819650937, |
| "grad_norm": 0.1844065934419632, |
| "learning_rate": 2.702319151789551e-06, |
| "loss": 0.4769, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.8687782805429864, |
| "grad_norm": 0.1706087440252304, |
| "learning_rate": 2.6855888627282097e-06, |
| "loss": 0.4688, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.8791208791208791, |
| "grad_norm": 0.17604754865169525, |
| "learning_rate": 2.6689685505189168e-06, |
| "loss": 0.4604, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.8894634776987718, |
| "grad_norm": 0.19641447067260742, |
| "learning_rate": 2.6524578525009895e-06, |
| "loss": 0.4619, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.8998060762766645, |
| "grad_norm": 0.1839519441127777, |
| "learning_rate": 2.6360564060137445e-06, |
| "loss": 0.4658, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.9101486748545572, |
| "grad_norm": 0.17227011919021606, |
| "learning_rate": 2.6197638483965012e-06, |
| "loss": 0.4657, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.9204912734324499, |
| "grad_norm": 0.18372756242752075, |
| "learning_rate": 2.6035798169885755e-06, |
| "loss": 0.4752, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.9308338720103426, |
| "grad_norm": 0.17142316699028015, |
| "learning_rate": 2.5875039491292835e-06, |
| "loss": 0.4648, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.9411764705882353, |
| "grad_norm": 0.20659951865673065, |
| "learning_rate": 2.5715358821579443e-06, |
| "loss": 0.4564, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.9515190691661279, |
| "grad_norm": 0.17238274216651917, |
| "learning_rate": 2.5556752534138737e-06, |
| "loss": 0.4509, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.9618616677440207, |
| "grad_norm": 0.1769212931394577, |
| "learning_rate": 2.5399217002363903e-06, |
| "loss": 0.4631, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.9722042663219134, |
| "grad_norm": 0.1703854501247406, |
| "learning_rate": 2.5242748599648104e-06, |
| "loss": 0.4665, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.982546864899806, |
| "grad_norm": 0.17398501932621002, |
| "learning_rate": 2.5087343699384522e-06, |
| "loss": 0.4728, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.9928894634776988, |
| "grad_norm": 0.20379476249217987, |
| "learning_rate": 2.4932998674966306e-06, |
| "loss": 0.4647, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.0025856496444732, |
| "grad_norm": 0.172428697347641, |
| "learning_rate": 2.4779709899786656e-06, |
| "loss": 0.4485, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.0129282482223658, |
| "grad_norm": 0.18400946259498596, |
| "learning_rate": 2.4627473747238725e-06, |
| "loss": 0.4546, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.0232708468002585, |
| "grad_norm": 0.177312433719635, |
| "learning_rate": 2.44762865907157e-06, |
| "loss": 0.4642, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.0336134453781514, |
| "grad_norm": 0.19368407130241394, |
| "learning_rate": 2.4326144803610743e-06, |
| "loss": 0.4703, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.043956043956044, |
| "grad_norm": 0.18584667146205902, |
| "learning_rate": 2.4177044759317023e-06, |
| "loss": 0.4672, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.0542986425339367, |
| "grad_norm": 0.1793242245912552, |
| "learning_rate": 2.402898283122773e-06, |
| "loss": 0.4519, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.0646412411118293, |
| "grad_norm": 0.17912016808986664, |
| "learning_rate": 2.3881955392736017e-06, |
| "loss": 0.4636, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.074983839689722, |
| "grad_norm": 0.17298536002635956, |
| "learning_rate": 2.373595881723507e-06, |
| "loss": 0.4512, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.0853264382676147, |
| "grad_norm": 0.1726359874010086, |
| "learning_rate": 2.3590989478118058e-06, |
| "loss": 0.4699, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.0956690368455075, |
| "grad_norm": 0.17390099167823792, |
| "learning_rate": 2.3447043748778143e-06, |
| "loss": 0.4587, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.1060116354234002, |
| "grad_norm": 0.25595802068710327, |
| "learning_rate": 2.3304118002608516e-06, |
| "loss": 0.4615, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.1163542340012929, |
| "grad_norm": 0.17193318903446198, |
| "learning_rate": 2.3162208613002327e-06, |
| "loss": 0.4624, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.1266968325791855, |
| "grad_norm": 0.19484907388687134, |
| "learning_rate": 2.302131195335277e-06, |
| "loss": 0.4623, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.1370394311570782, |
| "grad_norm": 0.1798972338438034, |
| "learning_rate": 2.2881424397053003e-06, |
| "loss": 0.4649, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.1473820297349708, |
| "grad_norm": 0.18051382899284363, |
| "learning_rate": 2.274254231749621e-06, |
| "loss": 0.4601, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.1577246283128635, |
| "grad_norm": 0.17082150280475616, |
| "learning_rate": 2.2604662088075545e-06, |
| "loss": 0.4499, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.1680672268907564, |
| "grad_norm": 0.1901365965604782, |
| "learning_rate": 2.2467780082184194e-06, |
| "loss": 0.4593, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.178409825468649, |
| "grad_norm": 0.1783684343099594, |
| "learning_rate": 2.2331892673215335e-06, |
| "loss": 0.4529, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.1887524240465417, |
| "grad_norm": 0.19588534533977509, |
| "learning_rate": 2.219699623456213e-06, |
| "loss": 0.4602, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.1990950226244343, |
| "grad_norm": 0.20320181548595428, |
| "learning_rate": 2.206308713961775e-06, |
| "loss": 0.4541, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.209437621202327, |
| "grad_norm": 0.18522217869758606, |
| "learning_rate": 2.1930161761775375e-06, |
| "loss": 0.4607, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.2197802197802199, |
| "grad_norm": 0.1711958944797516, |
| "learning_rate": 2.179821647442817e-06, |
| "loss": 0.4604, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.2301228183581125, |
| "grad_norm": 0.1926986277103424, |
| "learning_rate": 2.166724765096931e-06, |
| "loss": 0.4687, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.2404654169360052, |
| "grad_norm": 0.17525337636470795, |
| "learning_rate": 2.1537251664791973e-06, |
| "loss": 0.4597, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.2508080155138979, |
| "grad_norm": 0.19414155185222626, |
| "learning_rate": 2.1408224889289327e-06, |
| "loss": 0.4511, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.2611506140917905, |
| "grad_norm": 0.1930965930223465, |
| "learning_rate": 2.1280163697854547e-06, |
| "loss": 0.4648, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.2714932126696832, |
| "grad_norm": 0.1782602071762085, |
| "learning_rate": 2.115306446388079e-06, |
| "loss": 0.4565, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.2818358112475758, |
| "grad_norm": 0.1658431589603424, |
| "learning_rate": 2.1026923560761246e-06, |
| "loss": 0.4684, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.2921784098254687, |
| "grad_norm": 0.17630533874034882, |
| "learning_rate": 2.090173736188908e-06, |
| "loss": 0.4689, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.3025210084033614, |
| "grad_norm": 0.16866503655910492, |
| "learning_rate": 2.0777502240657472e-06, |
| "loss": 0.4551, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.312863606981254, |
| "grad_norm": 0.17106667160987854, |
| "learning_rate": 2.065421457045959e-06, |
| "loss": 0.4618, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.3232062055591467, |
| "grad_norm": 0.17253316938877106, |
| "learning_rate": 2.0531870724688596e-06, |
| "loss": 0.4585, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.3335488041370394, |
| "grad_norm": 0.19091737270355225, |
| "learning_rate": 2.0410467076737674e-06, |
| "loss": 0.4594, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.3438914027149322, |
| "grad_norm": 0.17556996643543243, |
| "learning_rate": 2.0289999999999995e-06, |
| "loss": 0.449, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.354234001292825, |
| "grad_norm": 0.17378376424312592, |
| "learning_rate": 2.017046586786874e-06, |
| "loss": 0.46, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.3645765998707176, |
| "grad_norm": 0.17842325568199158, |
| "learning_rate": 2.005186105373706e-06, |
| "loss": 0.4457, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.3749191984486102, |
| "grad_norm": 0.18024806678295135, |
| "learning_rate": 1.993418193099814e-06, |
| "loss": 0.4562, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.3852617970265029, |
| "grad_norm": 0.1783292591571808, |
| "learning_rate": 1.9817424873045148e-06, |
| "loss": 0.4555, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.3956043956043955, |
| "grad_norm": 0.19366991519927979, |
| "learning_rate": 1.970158625327126e-06, |
| "loss": 0.4619, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.4059469941822882, |
| "grad_norm": 0.1875096708536148, |
| "learning_rate": 1.9586662445069657e-06, |
| "loss": 0.463, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.416289592760181, |
| "grad_norm": 0.19434590637683868, |
| "learning_rate": 1.94726498218335e-06, |
| "loss": 0.4568, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.4266321913380737, |
| "grad_norm": 0.18033798038959503, |
| "learning_rate": 1.9359544756955956e-06, |
| "loss": 0.4518, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.4369747899159664, |
| "grad_norm": 0.17548328638076782, |
| "learning_rate": 1.9247343623830206e-06, |
| "loss": 0.4563, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.447317388493859, |
| "grad_norm": 0.18100734055042267, |
| "learning_rate": 1.913604279584942e-06, |
| "loss": 0.453, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.4576599870717517, |
| "grad_norm": 0.17302066087722778, |
| "learning_rate": 1.9025638646406777e-06, |
| "loss": 0.4497, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.4680025856496446, |
| "grad_norm": 0.17910614609718323, |
| "learning_rate": 1.8916127548895444e-06, |
| "loss": 0.4588, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.4783451842275372, |
| "grad_norm": 0.1921713948249817, |
| "learning_rate": 1.8807505876708593e-06, |
| "loss": 0.4631, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.48868778280543, |
| "grad_norm": 0.18404294550418854, |
| "learning_rate": 1.8699770003239392e-06, |
| "loss": 0.4686, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.4990303813833226, |
| "grad_norm": 0.1868065893650055, |
| "learning_rate": 1.859291630188102e-06, |
| "loss": 0.4594, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.5093729799612152, |
| "grad_norm": 0.42297038435935974, |
| "learning_rate": 1.8486941146026646e-06, |
| "loss": 0.4635, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.5197155785391079, |
| "grad_norm": 0.17838597297668457, |
| "learning_rate": 1.8381840909069446e-06, |
| "loss": 0.4479, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.5300581771170005, |
| "grad_norm": 0.16778771579265594, |
| "learning_rate": 1.8277611964402589e-06, |
| "loss": 0.4583, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.5404007756948932, |
| "grad_norm": 0.16617390513420105, |
| "learning_rate": 1.817425068541925e-06, |
| "loss": 0.4544, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.550743374272786, |
| "grad_norm": 0.1774096041917801, |
| "learning_rate": 1.8071753445512594e-06, |
| "loss": 0.4451, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.5610859728506787, |
| "grad_norm": 0.19082525372505188, |
| "learning_rate": 1.7970116618075802e-06, |
| "loss": 0.4666, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.5714285714285714, |
| "grad_norm": 0.26731783151626587, |
| "learning_rate": 1.7869336576502043e-06, |
| "loss": 0.4504, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.5817711700064643, |
| "grad_norm": 0.17997735738754272, |
| "learning_rate": 1.7769409694184493e-06, |
| "loss": 0.4505, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.592113768584357, |
| "grad_norm": 0.1862158477306366, |
| "learning_rate": 1.7670332344516319e-06, |
| "loss": 0.4554, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.6024563671622496, |
| "grad_norm": 0.18264687061309814, |
| "learning_rate": 1.757210090089069e-06, |
| "loss": 0.4568, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.6127989657401423, |
| "grad_norm": 0.18124184012413025, |
| "learning_rate": 1.7474711736700786e-06, |
| "loss": 0.4562, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.623141564318035, |
| "grad_norm": 0.18057860434055328, |
| "learning_rate": 1.737816122533978e-06, |
| "loss": 0.4584, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.6334841628959276, |
| "grad_norm": 0.1704379767179489, |
| "learning_rate": 1.7282445740200844e-06, |
| "loss": 0.4565, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.6438267614738202, |
| "grad_norm": 0.17907528579235077, |
| "learning_rate": 1.7187561654677143e-06, |
| "loss": 0.4551, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.654169360051713, |
| "grad_norm": 0.1727352887392044, |
| "learning_rate": 1.7093505342161853e-06, |
| "loss": 0.4626, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.6645119586296055, |
| "grad_norm": 0.192378431558609, |
| "learning_rate": 1.7000273176048153e-06, |
| "loss": 0.4609, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.6748545572074984, |
| "grad_norm": 0.21348994970321655, |
| "learning_rate": 1.6907861529729202e-06, |
| "loss": 0.4581, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.685197155785391, |
| "grad_norm": 0.17903228104114532, |
| "learning_rate": 1.6816266776598186e-06, |
| "loss": 0.4628, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.6955397543632837, |
| "grad_norm": 0.17800310254096985, |
| "learning_rate": 1.672548529004827e-06, |
| "loss": 0.4625, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.7058823529411766, |
| "grad_norm": 0.17950348556041718, |
| "learning_rate": 1.663551344347263e-06, |
| "loss": 0.4595, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.7162249515190693, |
| "grad_norm": 0.17900501191616058, |
| "learning_rate": 1.654634761026443e-06, |
| "loss": 0.4574, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.726567550096962, |
| "grad_norm": 0.17247159779071808, |
| "learning_rate": 1.645798416381685e-06, |
| "loss": 0.4517, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.7369101486748546, |
| "grad_norm": 0.18945126235485077, |
| "learning_rate": 1.6370419477523063e-06, |
| "loss": 0.4609, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.7472527472527473, |
| "grad_norm": 0.1901276856660843, |
| "learning_rate": 1.628364992477624e-06, |
| "loss": 0.4538, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.75759534583064, |
| "grad_norm": 0.17829464375972748, |
| "learning_rate": 1.6197671878969554e-06, |
| "loss": 0.4481, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.7679379444085326, |
| "grad_norm": 0.1853664517402649, |
| "learning_rate": 1.6112481713496172e-06, |
| "loss": 0.4597, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.7782805429864252, |
| "grad_norm": 0.17670245468616486, |
| "learning_rate": 1.6028075801749268e-06, |
| "loss": 0.4559, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.788623141564318, |
| "grad_norm": 0.16995681822299957, |
| "learning_rate": 1.594445051712202e-06, |
| "loss": 0.4516, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.7989657401422108, |
| "grad_norm": 0.18131399154663086, |
| "learning_rate": 1.58616022330076e-06, |
| "loss": 0.4574, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.8093083387201034, |
| "grad_norm": 0.16902993619441986, |
| "learning_rate": 1.5779527322799175e-06, |
| "loss": 0.4506, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.819650937297996, |
| "grad_norm": 0.18973788619041443, |
| "learning_rate": 1.5698222159889919e-06, |
| "loss": 0.4542, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.829993535875889, |
| "grad_norm": 0.17884907126426697, |
| "learning_rate": 1.5617683117673004e-06, |
| "loss": 0.4627, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.8403361344537816, |
| "grad_norm": 0.1736602783203125, |
| "learning_rate": 1.55379065695416e-06, |
| "loss": 0.447, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.8506787330316743, |
| "grad_norm": 0.1826103925704956, |
| "learning_rate": 1.5458888888888889e-06, |
| "loss": 0.4634, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.861021331609567, |
| "grad_norm": 0.17924711108207703, |
| "learning_rate": 1.5380626449108035e-06, |
| "loss": 0.4436, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.8713639301874596, |
| "grad_norm": 0.17868177592754364, |
| "learning_rate": 1.5303115623592214e-06, |
| "loss": 0.4394, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.8817065287653523, |
| "grad_norm": 0.18871541321277618, |
| "learning_rate": 1.522635278573459e-06, |
| "loss": 0.4698, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.892049127343245, |
| "grad_norm": 0.17588797211647034, |
| "learning_rate": 1.5150334308928344e-06, |
| "loss": 0.4591, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.9023917259211376, |
| "grad_norm": 0.18185260891914368, |
| "learning_rate": 1.507505656656665e-06, |
| "loss": 0.465, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.9127343244990302, |
| "grad_norm": 0.18905200064182281, |
| "learning_rate": 1.5000515932042678e-06, |
| "loss": 0.4609, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.9230769230769231, |
| "grad_norm": 0.19702599942684174, |
| "learning_rate": 1.4926708778749595e-06, |
| "loss": 0.4444, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.9334195216548158, |
| "grad_norm": 0.18513160943984985, |
| "learning_rate": 1.4853631480080578e-06, |
| "loss": 0.4565, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.9437621202327084, |
| "grad_norm": 0.17975644767284393, |
| "learning_rate": 1.4781280409428802e-06, |
| "loss": 0.4474, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.9541047188106013, |
| "grad_norm": 0.1855587661266327, |
| "learning_rate": 1.4709651940187429e-06, |
| "loss": 0.4517, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.964447317388494, |
| "grad_norm": 0.18554966151714325, |
| "learning_rate": 1.4638742445749645e-06, |
| "loss": 0.4654, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.9747899159663866, |
| "grad_norm": 0.18967655301094055, |
| "learning_rate": 1.4568548299508615e-06, |
| "loss": 0.463, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.9851325145442793, |
| "grad_norm": 0.17238129675388336, |
| "learning_rate": 1.4499065874857511e-06, |
| "loss": 0.4525, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.995475113122172, |
| "grad_norm": 0.17824146151542664, |
| "learning_rate": 1.4430291545189505e-06, |
| "loss": 0.4443, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.0051712992889463, |
| "grad_norm": 0.1638309806585312, |
| "learning_rate": 1.4362221683897769e-06, |
| "loss": 0.4533, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.015513897866839, |
| "grad_norm": 0.19044017791748047, |
| "learning_rate": 1.4294852664375482e-06, |
| "loss": 0.4434, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.0258564964447316, |
| "grad_norm": 0.18576934933662415, |
| "learning_rate": 1.422818086001581e-06, |
| "loss": 0.4486, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.0361990950226243, |
| "grad_norm": 0.17555150389671326, |
| "learning_rate": 1.4162202644211928e-06, |
| "loss": 0.4619, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.046541693600517, |
| "grad_norm": 0.18890970945358276, |
| "learning_rate": 1.4096914390357005e-06, |
| "loss": 0.449, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.0568842921784096, |
| "grad_norm": 0.17668835818767548, |
| "learning_rate": 1.4032312471844212e-06, |
| "loss": 0.4613, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.0672268907563027, |
| "grad_norm": 0.17829003930091858, |
| "learning_rate": 1.3968393262066731e-06, |
| "loss": 0.4578, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.0775694893341954, |
| "grad_norm": 0.1794648915529251, |
| "learning_rate": 1.3905153134417726e-06, |
| "loss": 0.4615, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.087912087912088, |
| "grad_norm": 0.1816921979188919, |
| "learning_rate": 1.3842588462290373e-06, |
| "loss": 0.4431, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.0982546864899807, |
| "grad_norm": 0.16857373714447021, |
| "learning_rate": 1.378069561907784e-06, |
| "loss": 0.4508, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.1085972850678734, |
| "grad_norm": 0.18902304768562317, |
| "learning_rate": 1.3719470978173305e-06, |
| "loss": 0.4578, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.118939883645766, |
| "grad_norm": 0.17235969007015228, |
| "learning_rate": 1.3658910912969934e-06, |
| "loss": 0.4504, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.1292824822236587, |
| "grad_norm": 0.1716686636209488, |
| "learning_rate": 1.3599011796860908e-06, |
| "loss": 0.452, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.1396250808015513, |
| "grad_norm": 0.17327377200126648, |
| "learning_rate": 1.3539770003239391e-06, |
| "loss": 0.446, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.149967679379444, |
| "grad_norm": 0.1813182234764099, |
| "learning_rate": 1.3481181905498559e-06, |
| "loss": 0.4481, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.1603102779573367, |
| "grad_norm": 0.17645367980003357, |
| "learning_rate": 1.3423243877031582e-06, |
| "loss": 0.4566, |
| "step": 836 |
| }, |
| { |
| "epoch": 2.1706528765352293, |
| "grad_norm": 0.17306523025035858, |
| "learning_rate": 1.336595229123164e-06, |
| "loss": 0.4514, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.180995475113122, |
| "grad_norm": 0.17180515825748444, |
| "learning_rate": 1.3309303521491894e-06, |
| "loss": 0.4501, |
| "step": 844 |
| }, |
| { |
| "epoch": 2.191338073691015, |
| "grad_norm": 0.17196668684482574, |
| "learning_rate": 1.3253293941205526e-06, |
| "loss": 0.4539, |
| "step": 848 |
| }, |
| { |
| "epoch": 2.2016806722689077, |
| "grad_norm": 0.16701729595661163, |
| "learning_rate": 1.3197919923765701e-06, |
| "loss": 0.4557, |
| "step": 852 |
| }, |
| { |
| "epoch": 2.2120232708468004, |
| "grad_norm": 0.18192018568515778, |
| "learning_rate": 1.3143177842565595e-06, |
| "loss": 0.4508, |
| "step": 856 |
| }, |
| { |
| "epoch": 2.222365869424693, |
| "grad_norm": 0.17954544723033905, |
| "learning_rate": 1.3089064070998384e-06, |
| "loss": 0.4428, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.2327084680025857, |
| "grad_norm": 0.185075044631958, |
| "learning_rate": 1.3035574982457233e-06, |
| "loss": 0.4536, |
| "step": 864 |
| }, |
| { |
| "epoch": 2.2430510665804784, |
| "grad_norm": 0.19131481647491455, |
| "learning_rate": 1.298270695033532e-06, |
| "loss": 0.4552, |
| "step": 868 |
| }, |
| { |
| "epoch": 2.253393665158371, |
| "grad_norm": 0.18936187028884888, |
| "learning_rate": 1.2930456348025814e-06, |
| "loss": 0.4478, |
| "step": 872 |
| }, |
| { |
| "epoch": 2.2637362637362637, |
| "grad_norm": 0.2016269713640213, |
| "learning_rate": 1.2878819548921887e-06, |
| "loss": 0.4534, |
| "step": 876 |
| }, |
| { |
| "epoch": 2.2740788623141563, |
| "grad_norm": 0.21272307634353638, |
| "learning_rate": 1.2827792926416714e-06, |
| "loss": 0.4434, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.284421460892049, |
| "grad_norm": 0.18152523040771484, |
| "learning_rate": 1.2777372853903463e-06, |
| "loss": 0.4502, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.2947640594699417, |
| "grad_norm": 0.17132383584976196, |
| "learning_rate": 1.2727555704775317e-06, |
| "loss": 0.4402, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.3051066580478343, |
| "grad_norm": 0.17692337930202484, |
| "learning_rate": 1.2678337852425434e-06, |
| "loss": 0.4509, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.315449256625727, |
| "grad_norm": 0.18354307115077972, |
| "learning_rate": 1.2629715670246996e-06, |
| "loss": 0.4508, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.32579185520362, |
| "grad_norm": 0.17618027329444885, |
| "learning_rate": 1.258168553163317e-06, |
| "loss": 0.4469, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.3361344537815127, |
| "grad_norm": 0.1840873807668686, |
| "learning_rate": 1.2534243809977136e-06, |
| "loss": 0.4485, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.3464770523594054, |
| "grad_norm": 0.1692686676979065, |
| "learning_rate": 1.2487386878672057e-06, |
| "loss": 0.4532, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.356819650937298, |
| "grad_norm": 0.17941194772720337, |
| "learning_rate": 1.244111111111111e-06, |
| "loss": 0.4601, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.3671622495151907, |
| "grad_norm": 0.17824317514896393, |
| "learning_rate": 1.2395412880687467e-06, |
| "loss": 0.4653, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.3775048480930834, |
| "grad_norm": 0.17118173837661743, |
| "learning_rate": 1.2350288560794302e-06, |
| "loss": 0.4535, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.387847446670976, |
| "grad_norm": 0.19625437259674072, |
| "learning_rate": 1.2305734524824784e-06, |
| "loss": 0.456, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.3981900452488687, |
| "grad_norm": 0.17811912298202515, |
| "learning_rate": 1.2261747146172087e-06, |
| "loss": 0.4428, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.4085326438267614, |
| "grad_norm": 0.17827966809272766, |
| "learning_rate": 1.2218322798229384e-06, |
| "loss": 0.461, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.418875242404654, |
| "grad_norm": 0.19108909368515015, |
| "learning_rate": 1.2175457854389847e-06, |
| "loss": 0.4569, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.4292178409825467, |
| "grad_norm": 0.16872255504131317, |
| "learning_rate": 1.2133148688046646e-06, |
| "loss": 0.4529, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.4395604395604398, |
| "grad_norm": 0.17768999934196472, |
| "learning_rate": 1.2091391672592957e-06, |
| "loss": 0.4512, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.4499030381383324, |
| "grad_norm": 0.17542874813079834, |
| "learning_rate": 1.205018318142195e-06, |
| "loss": 0.4488, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.460245636716225, |
| "grad_norm": 0.18069233000278473, |
| "learning_rate": 1.20095195879268e-06, |
| "loss": 0.4512, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.4705882352941178, |
| "grad_norm": 0.17639058828353882, |
| "learning_rate": 1.1969397265500675e-06, |
| "loss": 0.4489, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.4809308338720104, |
| "grad_norm": 0.17847612500190735, |
| "learning_rate": 1.192981258753675e-06, |
| "loss": 0.45, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.491273432449903, |
| "grad_norm": 0.1748712956905365, |
| "learning_rate": 1.1890761927428197e-06, |
| "loss": 0.438, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.5016160310277957, |
| "grad_norm": 0.1816711723804474, |
| "learning_rate": 1.185224165856819e-06, |
| "loss": 0.4513, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.5119586296056884, |
| "grad_norm": 0.17945519089698792, |
| "learning_rate": 1.1814248154349897e-06, |
| "loss": 0.4505, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.522301228183581, |
| "grad_norm": 0.1755056381225586, |
| "learning_rate": 1.1776777788166495e-06, |
| "loss": 0.4587, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.5326438267614737, |
| "grad_norm": 0.2565567195415497, |
| "learning_rate": 1.1739826933411155e-06, |
| "loss": 0.4578, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.5429864253393664, |
| "grad_norm": 0.1997365802526474, |
| "learning_rate": 1.1703391963477048e-06, |
| "loss": 0.4487, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.553329023917259, |
| "grad_norm": 0.18262676894664764, |
| "learning_rate": 1.1667469251757346e-06, |
| "loss": 0.4453, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.5636716224951517, |
| "grad_norm": 0.17584246397018433, |
| "learning_rate": 1.1632055171645223e-06, |
| "loss": 0.4427, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.5740142210730443, |
| "grad_norm": 0.1811375468969345, |
| "learning_rate": 1.1597146096533852e-06, |
| "loss": 0.4574, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.5843568196509374, |
| "grad_norm": 0.1873408406972885, |
| "learning_rate": 1.1562738399816404e-06, |
| "loss": 0.4562, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.59469941822883, |
| "grad_norm": 0.18968404829502106, |
| "learning_rate": 1.152882845488605e-06, |
| "loss": 0.4468, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.6050420168067228, |
| "grad_norm": 0.1783261001110077, |
| "learning_rate": 1.1495412635135963e-06, |
| "loss": 0.4539, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.6153846153846154, |
| "grad_norm": 0.1903458833694458, |
| "learning_rate": 1.146248731395932e-06, |
| "loss": 0.4578, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.625727213962508, |
| "grad_norm": 0.17537672817707062, |
| "learning_rate": 1.1430048864749287e-06, |
| "loss": 0.4566, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.6360698125404007, |
| "grad_norm": 0.1887020617723465, |
| "learning_rate": 1.139809366089904e-06, |
| "loss": 0.4464, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.6464124111182934, |
| "grad_norm": 0.18123377859592438, |
| "learning_rate": 1.136661807580175e-06, |
| "loss": 0.4593, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.656755009696186, |
| "grad_norm": 0.1783813238143921, |
| "learning_rate": 1.133561848285059e-06, |
| "loss": 0.4499, |
| "step": 1028 |
| }, |
| { |
| "epoch": 2.6670976082740787, |
| "grad_norm": 0.17778170108795166, |
| "learning_rate": 1.130509125543873e-06, |
| "loss": 0.4486, |
| "step": 1032 |
| }, |
| { |
| "epoch": 2.677440206851972, |
| "grad_norm": 0.18150880932807922, |
| "learning_rate": 1.1275032766959346e-06, |
| "loss": 0.4515, |
| "step": 1036 |
| }, |
| { |
| "epoch": 2.6877828054298645, |
| "grad_norm": 0.17898158729076385, |
| "learning_rate": 1.1245439390805607e-06, |
| "loss": 0.4434, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.698125404007757, |
| "grad_norm": 0.18532375991344452, |
| "learning_rate": 1.1216307500370688e-06, |
| "loss": 0.4486, |
| "step": 1044 |
| }, |
| { |
| "epoch": 2.70846800258565, |
| "grad_norm": 0.1722891479730606, |
| "learning_rate": 1.118763346904776e-06, |
| "loss": 0.4548, |
| "step": 1048 |
| }, |
| { |
| "epoch": 2.7188106011635425, |
| "grad_norm": 0.1772516816854477, |
| "learning_rate": 1.1159413670229997e-06, |
| "loss": 0.4555, |
| "step": 1052 |
| }, |
| { |
| "epoch": 2.729153199741435, |
| "grad_norm": 0.17695064842700958, |
| "learning_rate": 1.1131644477310569e-06, |
| "loss": 0.4607, |
| "step": 1056 |
| }, |
| { |
| "epoch": 2.7394957983193278, |
| "grad_norm": 0.17609025537967682, |
| "learning_rate": 1.110432226368265e-06, |
| "loss": 0.4446, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.7498383968972204, |
| "grad_norm": 0.17976413667201996, |
| "learning_rate": 1.107744340273941e-06, |
| "loss": 0.4467, |
| "step": 1064 |
| }, |
| { |
| "epoch": 2.760180995475113, |
| "grad_norm": 0.18428538739681244, |
| "learning_rate": 1.1051004267874023e-06, |
| "loss": 0.4555, |
| "step": 1068 |
| }, |
| { |
| "epoch": 2.7705235940530057, |
| "grad_norm": 0.17254601418972015, |
| "learning_rate": 1.1025001232479663e-06, |
| "loss": 0.4432, |
| "step": 1072 |
| }, |
| { |
| "epoch": 2.7808661926308984, |
| "grad_norm": 0.17763815820217133, |
| "learning_rate": 1.09994306699495e-06, |
| "loss": 0.4533, |
| "step": 1076 |
| }, |
| { |
| "epoch": 2.791208791208791, |
| "grad_norm": 0.18219688534736633, |
| "learning_rate": 1.097428895367671e-06, |
| "loss": 0.455, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.8015513897866837, |
| "grad_norm": 0.17637620866298676, |
| "learning_rate": 1.0949572457054458e-06, |
| "loss": 0.4406, |
| "step": 1084 |
| }, |
| { |
| "epoch": 2.8118939883645764, |
| "grad_norm": 0.1767360419034958, |
| "learning_rate": 1.0925277553475922e-06, |
| "loss": 0.4504, |
| "step": 1088 |
| }, |
| { |
| "epoch": 2.822236586942469, |
| "grad_norm": 0.1777689903974533, |
| "learning_rate": 1.0901400616334274e-06, |
| "loss": 0.4443, |
| "step": 1092 |
| }, |
| { |
| "epoch": 2.832579185520362, |
| "grad_norm": 0.17348885536193848, |
| "learning_rate": 1.0877938019022687e-06, |
| "loss": 0.455, |
| "step": 1096 |
| }, |
| { |
| "epoch": 2.842921784098255, |
| "grad_norm": 0.16941364109516144, |
| "learning_rate": 1.0854886134934328e-06, |
| "loss": 0.4463, |
| "step": 1100 |
| }, |
| { |
| "epoch": 2.8532643826761475, |
| "grad_norm": 0.1874593198299408, |
| "learning_rate": 1.0832241337462376e-06, |
| "loss": 0.4498, |
| "step": 1104 |
| }, |
| { |
| "epoch": 2.86360698125404, |
| "grad_norm": 0.17837406694889069, |
| "learning_rate": 1.081e-06, |
| "loss": 0.454, |
| "step": 1108 |
| }, |
| { |
| "epoch": 2.8739495798319328, |
| "grad_norm": 0.19155140221118927, |
| "learning_rate": 1.0788158495940373e-06, |
| "loss": 0.4595, |
| "step": 1112 |
| }, |
| { |
| "epoch": 2.8842921784098254, |
| "grad_norm": 0.17509421706199646, |
| "learning_rate": 1.0766713198676667e-06, |
| "loss": 0.4567, |
| "step": 1116 |
| }, |
| { |
| "epoch": 2.894634776987718, |
| "grad_norm": 0.17931582033634186, |
| "learning_rate": 1.0745660481602052e-06, |
| "loss": 0.4536, |
| "step": 1120 |
| }, |
| { |
| "epoch": 2.9049773755656108, |
| "grad_norm": 0.1758774369955063, |
| "learning_rate": 1.0724996718109706e-06, |
| "loss": 0.4532, |
| "step": 1124 |
| }, |
| { |
| "epoch": 2.9153199741435034, |
| "grad_norm": 0.176919624209404, |
| "learning_rate": 1.0704718281592797e-06, |
| "loss": 0.4529, |
| "step": 1128 |
| }, |
| { |
| "epoch": 2.9256625727213965, |
| "grad_norm": 0.1802259087562561, |
| "learning_rate": 1.06848215454445e-06, |
| "loss": 0.4455, |
| "step": 1132 |
| }, |
| { |
| "epoch": 2.936005171299289, |
| "grad_norm": 0.16593530774116516, |
| "learning_rate": 1.0665302883057984e-06, |
| "loss": 0.4459, |
| "step": 1136 |
| }, |
| { |
| "epoch": 2.946347769877182, |
| "grad_norm": 0.17818193137645721, |
| "learning_rate": 1.0646158667826425e-06, |
| "loss": 0.4566, |
| "step": 1140 |
| }, |
| { |
| "epoch": 2.9566903684550745, |
| "grad_norm": 0.17576196789741516, |
| "learning_rate": 1.0627385273142993e-06, |
| "loss": 0.439, |
| "step": 1144 |
| }, |
| { |
| "epoch": 2.967032967032967, |
| "grad_norm": 0.1871625781059265, |
| "learning_rate": 1.0608979072400862e-06, |
| "loss": 0.4507, |
| "step": 1148 |
| }, |
| { |
| "epoch": 2.97737556561086, |
| "grad_norm": 0.18743683397769928, |
| "learning_rate": 1.05909364389932e-06, |
| "loss": 0.449, |
| "step": 1152 |
| }, |
| { |
| "epoch": 2.9877181641887525, |
| "grad_norm": 0.1833522915840149, |
| "learning_rate": 1.0573253746313186e-06, |
| "loss": 0.4577, |
| "step": 1156 |
| }, |
| { |
| "epoch": 2.998060762766645, |
| "grad_norm": 0.17550155520439148, |
| "learning_rate": 1.0555927367753988e-06, |
| "loss": 0.4523, |
| "step": 1160 |
| }, |
| { |
| "epoch": 3.0077569489334195, |
| "grad_norm": 0.17497506737709045, |
| "learning_rate": 1.0538953676708778e-06, |
| "loss": 0.443, |
| "step": 1164 |
| }, |
| { |
| "epoch": 3.018099547511312, |
| "grad_norm": 0.18479323387145996, |
| "learning_rate": 1.052232904657073e-06, |
| "loss": 0.4476, |
| "step": 1168 |
| }, |
| { |
| "epoch": 3.028442146089205, |
| "grad_norm": 0.17023231089115143, |
| "learning_rate": 1.0506049850733017e-06, |
| "loss": 0.4446, |
| "step": 1172 |
| }, |
| { |
| "epoch": 3.0387847446670975, |
| "grad_norm": 0.194169819355011, |
| "learning_rate": 1.0490112462588812e-06, |
| "loss": 0.4436, |
| "step": 1176 |
| }, |
| { |
| "epoch": 3.04912734324499, |
| "grad_norm": 0.16968706250190735, |
| "learning_rate": 1.0474513255531283e-06, |
| "loss": 0.4445, |
| "step": 1180 |
| }, |
| { |
| "epoch": 3.059469941822883, |
| "grad_norm": 0.18906928598880768, |
| "learning_rate": 1.0459248602953606e-06, |
| "loss": 0.464, |
| "step": 1184 |
| }, |
| { |
| "epoch": 3.069812540400776, |
| "grad_norm": 0.17577582597732544, |
| "learning_rate": 1.0444314878248952e-06, |
| "loss": 0.4505, |
| "step": 1188 |
| }, |
| { |
| "epoch": 3.0801551389786685, |
| "grad_norm": 0.16838566958904266, |
| "learning_rate": 1.0429708454810495e-06, |
| "loss": 0.4438, |
| "step": 1192 |
| }, |
| { |
| "epoch": 3.090497737556561, |
| "grad_norm": 0.18139545619487762, |
| "learning_rate": 1.0415425706031406e-06, |
| "loss": 0.4515, |
| "step": 1196 |
| }, |
| { |
| "epoch": 3.100840336134454, |
| "grad_norm": 0.17523127794265747, |
| "learning_rate": 1.0401463005304855e-06, |
| "loss": 0.4475, |
| "step": 1200 |
| }, |
| { |
| "epoch": 3.1111829347123465, |
| "grad_norm": 0.18693816661834717, |
| "learning_rate": 1.0387816726024021e-06, |
| "loss": 0.4447, |
| "step": 1204 |
| }, |
| { |
| "epoch": 3.121525533290239, |
| "grad_norm": 0.16429254412651062, |
| "learning_rate": 1.0374483241582069e-06, |
| "loss": 0.4509, |
| "step": 1208 |
| }, |
| { |
| "epoch": 3.131868131868132, |
| "grad_norm": 0.18026259541511536, |
| "learning_rate": 1.0361458925372175e-06, |
| "loss": 0.4548, |
| "step": 1212 |
| }, |
| { |
| "epoch": 3.1422107304460245, |
| "grad_norm": 0.19680114090442657, |
| "learning_rate": 1.034874015078751e-06, |
| "loss": 0.4477, |
| "step": 1216 |
| }, |
| { |
| "epoch": 3.152553329023917, |
| "grad_norm": 0.17316031455993652, |
| "learning_rate": 1.033632329122125e-06, |
| "loss": 0.4493, |
| "step": 1220 |
| }, |
| { |
| "epoch": 3.16289592760181, |
| "grad_norm": 0.16267041862010956, |
| "learning_rate": 1.0324204720066564e-06, |
| "loss": 0.4409, |
| "step": 1224 |
| }, |
| { |
| "epoch": 3.1732385261797025, |
| "grad_norm": 0.2089986652135849, |
| "learning_rate": 1.0312380810716623e-06, |
| "loss": 0.4504, |
| "step": 1228 |
| }, |
| { |
| "epoch": 3.183581124757595, |
| "grad_norm": 0.16956327855587006, |
| "learning_rate": 1.0300847936564602e-06, |
| "loss": 0.4471, |
| "step": 1232 |
| }, |
| { |
| "epoch": 3.1939237233354882, |
| "grad_norm": 0.17798687517642975, |
| "learning_rate": 1.0289602471003672e-06, |
| "loss": 0.4507, |
| "step": 1236 |
| }, |
| { |
| "epoch": 3.204266321913381, |
| "grad_norm": 0.17512640357017517, |
| "learning_rate": 1.0278640787427008e-06, |
| "loss": 0.446, |
| "step": 1240 |
| }, |
| { |
| "epoch": 3.2146089204912736, |
| "grad_norm": 0.17578400671482086, |
| "learning_rate": 1.0267959259227778e-06, |
| "loss": 0.4484, |
| "step": 1244 |
| }, |
| { |
| "epoch": 3.224951519069166, |
| "grad_norm": 0.17712455987930298, |
| "learning_rate": 1.0257554259799157e-06, |
| "loss": 0.4481, |
| "step": 1248 |
| }, |
| { |
| "epoch": 3.235294117647059, |
| "grad_norm": 0.1793729066848755, |
| "learning_rate": 1.0247422162534318e-06, |
| "loss": 0.441, |
| "step": 1252 |
| }, |
| { |
| "epoch": 3.2456367162249515, |
| "grad_norm": 0.16967114806175232, |
| "learning_rate": 1.0237559340826432e-06, |
| "loss": 0.4525, |
| "step": 1256 |
| }, |
| { |
| "epoch": 3.255979314802844, |
| "grad_norm": 0.1762080043554306, |
| "learning_rate": 1.022796216806867e-06, |
| "loss": 0.4344, |
| "step": 1260 |
| }, |
| { |
| "epoch": 3.266321913380737, |
| "grad_norm": 0.1827186495065689, |
| "learning_rate": 1.021862701765421e-06, |
| "loss": 0.4519, |
| "step": 1264 |
| }, |
| { |
| "epoch": 3.2766645119586295, |
| "grad_norm": 0.17989112436771393, |
| "learning_rate": 1.0209550262976217e-06, |
| "loss": 0.4513, |
| "step": 1268 |
| }, |
| { |
| "epoch": 3.287007110536522, |
| "grad_norm": 0.18342699110507965, |
| "learning_rate": 1.0200728277427867e-06, |
| "loss": 0.4485, |
| "step": 1272 |
| }, |
| { |
| "epoch": 3.297349709114415, |
| "grad_norm": 0.19681668281555176, |
| "learning_rate": 1.0192157434402333e-06, |
| "loss": 0.4429, |
| "step": 1276 |
| }, |
| { |
| "epoch": 3.3076923076923075, |
| "grad_norm": 0.172057643532753, |
| "learning_rate": 1.0183834107292785e-06, |
| "loss": 0.4525, |
| "step": 1280 |
| }, |
| { |
| "epoch": 3.3180349062702, |
| "grad_norm": 0.16999749839305878, |
| "learning_rate": 1.0175754669492397e-06, |
| "loss": 0.4532, |
| "step": 1284 |
| }, |
| { |
| "epoch": 3.3283775048480932, |
| "grad_norm": 0.18436214327812195, |
| "learning_rate": 1.0167915494394342e-06, |
| "loss": 0.4566, |
| "step": 1288 |
| }, |
| { |
| "epoch": 3.338720103425986, |
| "grad_norm": 0.16745057702064514, |
| "learning_rate": 1.016031295539179e-06, |
| "loss": 0.4605, |
| "step": 1292 |
| }, |
| { |
| "epoch": 3.3490627020038786, |
| "grad_norm": 0.18557773530483246, |
| "learning_rate": 1.0152943425877918e-06, |
| "loss": 0.4457, |
| "step": 1296 |
| }, |
| { |
| "epoch": 3.3594053005817712, |
| "grad_norm": 0.181436225771904, |
| "learning_rate": 1.0145803279245893e-06, |
| "loss": 0.439, |
| "step": 1300 |
| }, |
| { |
| "epoch": 3.369747899159664, |
| "grad_norm": 0.17534784972667694, |
| "learning_rate": 1.0138888888888887e-06, |
| "loss": 0.4437, |
| "step": 1304 |
| }, |
| { |
| "epoch": 3.3800904977375565, |
| "grad_norm": 0.17567597329616547, |
| "learning_rate": 1.0132196628200078e-06, |
| "loss": 0.4405, |
| "step": 1308 |
| }, |
| { |
| "epoch": 3.390433096315449, |
| "grad_norm": 0.17782679200172424, |
| "learning_rate": 1.0125722870572635e-06, |
| "loss": 0.4468, |
| "step": 1312 |
| }, |
| { |
| "epoch": 3.400775694893342, |
| "grad_norm": 0.17457668483257294, |
| "learning_rate": 1.011946398939973e-06, |
| "loss": 0.454, |
| "step": 1316 |
| }, |
| { |
| "epoch": 3.4111182934712345, |
| "grad_norm": 0.16642296314239502, |
| "learning_rate": 1.0113416358074535e-06, |
| "loss": 0.4465, |
| "step": 1320 |
| }, |
| { |
| "epoch": 3.421460892049127, |
| "grad_norm": 0.17336755990982056, |
| "learning_rate": 1.0107576349990224e-06, |
| "loss": 0.456, |
| "step": 1324 |
| }, |
| { |
| "epoch": 3.4318034906270203, |
| "grad_norm": 0.18164758384227753, |
| "learning_rate": 1.010194033853997e-06, |
| "loss": 0.4469, |
| "step": 1328 |
| }, |
| { |
| "epoch": 3.442146089204913, |
| "grad_norm": 0.1722988784313202, |
| "learning_rate": 1.009650469711694e-06, |
| "loss": 0.4471, |
| "step": 1332 |
| }, |
| { |
| "epoch": 3.4524886877828056, |
| "grad_norm": 0.17613555490970612, |
| "learning_rate": 1.0091265799114313e-06, |
| "loss": 0.4473, |
| "step": 1336 |
| }, |
| { |
| "epoch": 3.4628312863606983, |
| "grad_norm": 0.17907673120498657, |
| "learning_rate": 1.008622001792526e-06, |
| "loss": 0.4611, |
| "step": 1340 |
| }, |
| { |
| "epoch": 3.473173884938591, |
| "grad_norm": 0.18107053637504578, |
| "learning_rate": 1.0081363726942948e-06, |
| "loss": 0.4415, |
| "step": 1344 |
| }, |
| { |
| "epoch": 3.4835164835164836, |
| "grad_norm": 0.19422635436058044, |
| "learning_rate": 1.0076693299560557e-06, |
| "loss": 0.4413, |
| "step": 1348 |
| }, |
| { |
| "epoch": 3.4938590820943762, |
| "grad_norm": 0.1928495168685913, |
| "learning_rate": 1.0072205109171253e-06, |
| "loss": 0.4525, |
| "step": 1352 |
| }, |
| { |
| "epoch": 3.504201680672269, |
| "grad_norm": 0.17438076436519623, |
| "learning_rate": 1.0067895529168213e-06, |
| "loss": 0.4435, |
| "step": 1356 |
| }, |
| { |
| "epoch": 3.5145442792501616, |
| "grad_norm": 0.17133189737796783, |
| "learning_rate": 1.0063760932944606e-06, |
| "loss": 0.4574, |
| "step": 1360 |
| }, |
| { |
| "epoch": 3.524886877828054, |
| "grad_norm": 0.18594177067279816, |
| "learning_rate": 1.0059797693893605e-06, |
| "loss": 0.4434, |
| "step": 1364 |
| }, |
| { |
| "epoch": 3.535229476405947, |
| "grad_norm": 0.1890227347612381, |
| "learning_rate": 1.0056002185408385e-06, |
| "loss": 0.4556, |
| "step": 1368 |
| }, |
| { |
| "epoch": 3.5455720749838395, |
| "grad_norm": 0.17522525787353516, |
| "learning_rate": 1.0052370780882114e-06, |
| "loss": 0.4506, |
| "step": 1372 |
| }, |
| { |
| "epoch": 3.555914673561732, |
| "grad_norm": 0.18247763812541962, |
| "learning_rate": 1.0048899853707968e-06, |
| "loss": 0.4485, |
| "step": 1376 |
| }, |
| { |
| "epoch": 3.566257272139625, |
| "grad_norm": 0.18556474149227142, |
| "learning_rate": 1.0045585777279118e-06, |
| "loss": 0.4579, |
| "step": 1380 |
| }, |
| { |
| "epoch": 3.5765998707175175, |
| "grad_norm": 0.17300067842006683, |
| "learning_rate": 1.0042424924988736e-06, |
| "loss": 0.4512, |
| "step": 1384 |
| }, |
| { |
| "epoch": 3.5869424692954106, |
| "grad_norm": 0.18004953861236572, |
| "learning_rate": 1.0039413670229995e-06, |
| "loss": 0.4453, |
| "step": 1388 |
| }, |
| { |
| "epoch": 3.5972850678733033, |
| "grad_norm": 0.1785740703344345, |
| "learning_rate": 1.0036548386396067e-06, |
| "loss": 0.4449, |
| "step": 1392 |
| }, |
| { |
| "epoch": 3.607627666451196, |
| "grad_norm": 0.16730371117591858, |
| "learning_rate": 1.0033825446880127e-06, |
| "loss": 0.4485, |
| "step": 1396 |
| }, |
| { |
| "epoch": 3.6179702650290886, |
| "grad_norm": 0.18206489086151123, |
| "learning_rate": 1.0031241225075341e-06, |
| "loss": 0.4471, |
| "step": 1400 |
| }, |
| { |
| "epoch": 3.6283128636069812, |
| "grad_norm": 0.17622709274291992, |
| "learning_rate": 1.0028792094374887e-06, |
| "loss": 0.4436, |
| "step": 1404 |
| }, |
| { |
| "epoch": 3.638655462184874, |
| "grad_norm": 0.18737947940826416, |
| "learning_rate": 1.0026474428171935e-06, |
| "loss": 0.4389, |
| "step": 1408 |
| }, |
| { |
| "epoch": 3.6489980607627666, |
| "grad_norm": 0.17098890244960785, |
| "learning_rate": 1.0024284599859658e-06, |
| "loss": 0.4363, |
| "step": 1412 |
| }, |
| { |
| "epoch": 3.659340659340659, |
| "grad_norm": 0.17516440153121948, |
| "learning_rate": 1.0022218982831228e-06, |
| "loss": 0.4503, |
| "step": 1416 |
| }, |
| { |
| "epoch": 3.669683257918552, |
| "grad_norm": 0.18128778040409088, |
| "learning_rate": 1.0020273950479815e-06, |
| "loss": 0.441, |
| "step": 1420 |
| }, |
| { |
| "epoch": 3.680025856496445, |
| "grad_norm": 0.18445086479187012, |
| "learning_rate": 1.0018445876198597e-06, |
| "loss": 0.4389, |
| "step": 1424 |
| }, |
| { |
| "epoch": 3.6903684550743376, |
| "grad_norm": 0.18264997005462646, |
| "learning_rate": 1.0016731133380744e-06, |
| "loss": 0.4394, |
| "step": 1428 |
| }, |
| { |
| "epoch": 3.7007110536522303, |
| "grad_norm": 0.17699220776557922, |
| "learning_rate": 1.0015126095419424e-06, |
| "loss": 0.4494, |
| "step": 1432 |
| }, |
| { |
| "epoch": 3.711053652230123, |
| "grad_norm": 0.17461882531642914, |
| "learning_rate": 1.0013627135707816e-06, |
| "loss": 0.4521, |
| "step": 1436 |
| }, |
| { |
| "epoch": 3.7213962508080156, |
| "grad_norm": 0.17526471614837646, |
| "learning_rate": 1.0012230627639088e-06, |
| "loss": 0.4389, |
| "step": 1440 |
| }, |
| { |
| "epoch": 3.7317388493859083, |
| "grad_norm": 0.17140990495681763, |
| "learning_rate": 1.0010932944606414e-06, |
| "loss": 0.4489, |
| "step": 1444 |
| }, |
| { |
| "epoch": 3.742081447963801, |
| "grad_norm": 0.19351443648338318, |
| "learning_rate": 1.0009730460002965e-06, |
| "loss": 0.4436, |
| "step": 1448 |
| }, |
| { |
| "epoch": 3.7524240465416936, |
| "grad_norm": 0.1850733458995819, |
| "learning_rate": 1.0008619547221914e-06, |
| "loss": 0.4514, |
| "step": 1452 |
| }, |
| { |
| "epoch": 3.7627666451195863, |
| "grad_norm": 0.18625636398792267, |
| "learning_rate": 1.0007596579656435e-06, |
| "loss": 0.4484, |
| "step": 1456 |
| }, |
| { |
| "epoch": 3.773109243697479, |
| "grad_norm": 0.18134167790412903, |
| "learning_rate": 1.00066579306997e-06, |
| "loss": 0.4549, |
| "step": 1460 |
| }, |
| { |
| "epoch": 3.7834518422753716, |
| "grad_norm": 0.19236284494400024, |
| "learning_rate": 1.0005799973744876e-06, |
| "loss": 0.4433, |
| "step": 1464 |
| }, |
| { |
| "epoch": 3.7937944408532642, |
| "grad_norm": 0.1741548478603363, |
| "learning_rate": 1.0005019082185143e-06, |
| "loss": 0.4476, |
| "step": 1468 |
| }, |
| { |
| "epoch": 3.804137039431157, |
| "grad_norm": 0.17673614621162415, |
| "learning_rate": 1.000431162941367e-06, |
| "loss": 0.4462, |
| "step": 1472 |
| }, |
| { |
| "epoch": 3.8144796380090495, |
| "grad_norm": 0.1771819293498993, |
| "learning_rate": 1.0003673988823628e-06, |
| "loss": 0.4561, |
| "step": 1476 |
| }, |
| { |
| "epoch": 3.824822236586942, |
| "grad_norm": 0.17858363687992096, |
| "learning_rate": 1.0003102533808192e-06, |
| "loss": 0.4479, |
| "step": 1480 |
| }, |
| { |
| "epoch": 3.8351648351648353, |
| "grad_norm": 0.16550451517105103, |
| "learning_rate": 1.0002593637760531e-06, |
| "loss": 0.4488, |
| "step": 1484 |
| }, |
| { |
| "epoch": 3.845507433742728, |
| "grad_norm": 0.17858079075813293, |
| "learning_rate": 1.0002143674073823e-06, |
| "loss": 0.45, |
| "step": 1488 |
| }, |
| { |
| "epoch": 3.8558500323206206, |
| "grad_norm": 0.187116801738739, |
| "learning_rate": 1.0001749016141235e-06, |
| "loss": 0.4474, |
| "step": 1492 |
| }, |
| { |
| "epoch": 3.8661926308985133, |
| "grad_norm": 0.18668505549430847, |
| "learning_rate": 1.0001406037355939e-06, |
| "loss": 0.4432, |
| "step": 1496 |
| }, |
| { |
| "epoch": 3.876535229476406, |
| "grad_norm": 0.17585262656211853, |
| "learning_rate": 1.0001111111111112e-06, |
| "loss": 0.4439, |
| "step": 1500 |
| }, |
| { |
| "epoch": 3.8868778280542986, |
| "grad_norm": 0.17823632061481476, |
| "learning_rate": 1.0000860610799922e-06, |
| "loss": 0.4576, |
| "step": 1504 |
| }, |
| { |
| "epoch": 3.8972204266321913, |
| "grad_norm": 0.1767703890800476, |
| "learning_rate": 1.0000650909815542e-06, |
| "loss": 0.4542, |
| "step": 1508 |
| }, |
| { |
| "epoch": 3.907563025210084, |
| "grad_norm": 0.16956399381160736, |
| "learning_rate": 1.0000478381551149e-06, |
| "loss": 0.4533, |
| "step": 1512 |
| }, |
| { |
| "epoch": 3.9179056237879766, |
| "grad_norm": 0.17914198338985443, |
| "learning_rate": 1.000033939939991e-06, |
| "loss": 0.4456, |
| "step": 1516 |
| }, |
| { |
| "epoch": 3.9282482223658697, |
| "grad_norm": 0.1634252965450287, |
| "learning_rate": 1.0000230336754998e-06, |
| "loss": 0.4466, |
| "step": 1520 |
| }, |
| { |
| "epoch": 3.9385908209437623, |
| "grad_norm": 0.17150919139385223, |
| "learning_rate": 1.0000147567009588e-06, |
| "loss": 0.4466, |
| "step": 1524 |
| }, |
| { |
| "epoch": 3.948933419521655, |
| "grad_norm": 0.18172475695610046, |
| "learning_rate": 1.0000087463556851e-06, |
| "loss": 0.4431, |
| "step": 1528 |
| }, |
| { |
| "epoch": 3.9592760180995477, |
| "grad_norm": 0.1767711490392685, |
| "learning_rate": 1.000004639978996e-06, |
| "loss": 0.4512, |
| "step": 1532 |
| }, |
| { |
| "epoch": 3.9696186166774403, |
| "grad_norm": 0.1694304198026657, |
| "learning_rate": 1.0000020749102083e-06, |
| "loss": 0.4483, |
| "step": 1536 |
| }, |
| { |
| "epoch": 3.979961215255333, |
| "grad_norm": 0.18949837982654572, |
| "learning_rate": 1.0000006884886399e-06, |
| "loss": 0.4464, |
| "step": 1540 |
| }, |
| { |
| "epoch": 3.9903038138332256, |
| "grad_norm": 0.17919163405895233, |
| "learning_rate": 1.0000001180536076e-06, |
| "loss": 0.4572, |
| "step": 1544 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.3023968040943146, |
| "learning_rate": 1.0000000009444288e-06, |
| "loss": 0.4456, |
| "step": 1548 |
| } |
| ], |
| "logging_steps": 4, |
| "max_steps": 1548, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 4, |
| "save_steps": 15000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6.286572983201956e+17, |
| "train_batch_size": 216, |
| "trial_name": null, |
| "trial_params": null |
| } |
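A minimal sketch of reading this state file back for inspection, assuming the JSON above is saved under the usual `trainer_state.json` name that Trainer checkpoints use; the keys referenced below (`log_history`, `step`, `loss`, `learning_rate`) are the ones visible in the records above, and the filename is otherwise an assumption.

```python
import json

# Load the serialized trainer state shown above (path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries epoch, step, loss, learning_rate, grad_norm.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

print(f"logged points: {len(entries)}")
print(f"final step {steps[-1]}: loss={losses[-1]}, lr={lrs[-1]}")
```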