{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.982278481012658,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004050632911392405,
      "grad_norm": 6.493779741035002,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 1.0369,
      "step": 1
    },
    {
      "epoch": 0.00810126582278481,
      "grad_norm": 6.515106229518725,
      "learning_rate": 6.504065040650407e-07,
      "loss": 1.0456,
      "step": 2
    },
    {
      "epoch": 0.012151898734177215,
      "grad_norm": 6.429995969071275,
      "learning_rate": 9.75609756097561e-07,
      "loss": 1.0189,
      "step": 3
    },
    {
      "epoch": 0.01620253164556962,
      "grad_norm": 6.387039354914473,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 1.0409,
      "step": 4
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 6.116498044645081,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 1.0311,
      "step": 5
    },
    {
      "epoch": 0.02430379746835443,
      "grad_norm": 5.941014474026115,
      "learning_rate": 1.951219512195122e-06,
      "loss": 1.0236,
      "step": 6
    },
    {
      "epoch": 0.028354430379746835,
      "grad_norm": 4.653955647394794,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 0.9925,
      "step": 7
    },
    {
      "epoch": 0.03240506329113924,
      "grad_norm": 4.182108605052275,
      "learning_rate": 2.601626016260163e-06,
      "loss": 0.9875,
      "step": 8
    },
    {
      "epoch": 0.03645569620253165,
      "grad_norm": 2.7833095417146194,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.966,
      "step": 9
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 2.5982924224651462,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.9588,
      "step": 10
    },
    {
      "epoch": 0.044556962025316456,
      "grad_norm": 2.496034039850331,
      "learning_rate": 3.577235772357724e-06,
      "loss": 0.9444,
      "step": 11
    },
    {
      "epoch": 0.04860759493670886,
      "grad_norm": 4.040707066018245,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.9377,
      "step": 12
    },
    {
      "epoch": 0.052658227848101265,
      "grad_norm": 4.292346111875974,
      "learning_rate": 4.227642276422765e-06,
      "loss": 0.9281,
      "step": 13
    },
    {
      "epoch": 0.05670886075949367,
      "grad_norm": 4.00842290769796,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.9214,
      "step": 14
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 3.7344061702213467,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.9315,
      "step": 15
    },
    {
      "epoch": 0.06481012658227848,
      "grad_norm": 2.7848035744391844,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.8908,
      "step": 16
    },
    {
      "epoch": 0.06886075949367089,
      "grad_norm": 2.6387229811312456,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.8645,
      "step": 17
    },
    {
      "epoch": 0.0729113924050633,
      "grad_norm": 2.0405970359934886,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.842,
      "step": 18
    },
    {
      "epoch": 0.0769620253164557,
      "grad_norm": 1.5063531839872482,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.8444,
      "step": 19
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 1.2231838348418702,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.8399,
      "step": 20
    },
    {
      "epoch": 0.08506329113924051,
      "grad_norm": 1.1816006571265374,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.8338,
      "step": 21
    },
    {
      "epoch": 0.08911392405063291,
      "grad_norm": 1.187015584076262,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.7943,
      "step": 22
    },
    {
      "epoch": 0.09316455696202532,
      "grad_norm": 1.0826454198633744,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.7991,
      "step": 23
    },
    {
      "epoch": 0.09721518987341772,
      "grad_norm": 0.9916375884291238,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.795,
      "step": 24
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 0.8768118030200569,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.7858,
      "step": 25
    },
    {
      "epoch": 0.10531645569620253,
      "grad_norm": 0.8843277046551754,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.7671,
      "step": 26
    },
    {
      "epoch": 0.10936708860759493,
      "grad_norm": 0.8869623782243337,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.7686,
      "step": 27
    },
    {
      "epoch": 0.11341772151898734,
      "grad_norm": 0.7637712328586501,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.7552,
      "step": 28
    },
    {
      "epoch": 0.11746835443037974,
      "grad_norm": 0.7726766110043194,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.7414,
      "step": 29
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 0.8359043319346965,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.7657,
      "step": 30
    },
    {
      "epoch": 0.12556962025316457,
      "grad_norm": 0.7278177337741392,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.737,
      "step": 31
    },
    {
      "epoch": 0.12962025316455697,
      "grad_norm": 0.7726847175656346,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.7289,
      "step": 32
    },
    {
      "epoch": 0.13367088607594937,
      "grad_norm": 0.7146625232042311,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.7401,
      "step": 33
    },
    {
      "epoch": 0.13772151898734178,
      "grad_norm": 0.769595424293189,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.7332,
      "step": 34
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.7077250993009653,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.7279,
      "step": 35
    },
    {
      "epoch": 0.1458227848101266,
      "grad_norm": 0.6259048383641109,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.721,
      "step": 36
    },
    {
      "epoch": 0.149873417721519,
      "grad_norm": 0.7064145889175929,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.7023,
      "step": 37
    },
    {
      "epoch": 0.1539240506329114,
      "grad_norm": 0.5739218033720924,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.7075,
      "step": 38
    },
    {
      "epoch": 0.1579746835443038,
      "grad_norm": 0.5290977416891973,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.709,
      "step": 39
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.6338445755753618,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.7225,
      "step": 40
    },
    {
      "epoch": 0.1660759493670886,
      "grad_norm": 0.582008946784302,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7013,
      "step": 41
    },
    {
      "epoch": 0.17012658227848101,
      "grad_norm": 0.46148416206026244,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.6941,
      "step": 42
    },
    {
      "epoch": 0.17417721518987342,
      "grad_norm": 0.5518185411745675,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.7036,
      "step": 43
    },
    {
      "epoch": 0.17822784810126582,
      "grad_norm": 0.4639122282449821,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.6916,
      "step": 44
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.452346813462064,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.6923,
      "step": 45
    },
    {
      "epoch": 0.18632911392405063,
      "grad_norm": 0.477960285448589,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.694,
      "step": 46
    },
    {
      "epoch": 0.19037974683544304,
      "grad_norm": 0.48180303782475953,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.6917,
      "step": 47
    },
    {
      "epoch": 0.19443037974683544,
      "grad_norm": 0.5043342484339408,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.6898,
      "step": 48
    },
    {
      "epoch": 0.19848101265822785,
      "grad_norm": 0.5563154851040636,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.6752,
      "step": 49
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.5461413876421156,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.6908,
      "step": 50
    },
    {
      "epoch": 0.20658227848101265,
      "grad_norm": 0.6520126658339338,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.707,
      "step": 51
    },
    {
      "epoch": 0.21063291139240506,
      "grad_norm": 0.5813047978084712,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.7037,
      "step": 52
    },
    {
      "epoch": 0.21468354430379746,
      "grad_norm": 0.5577770947575851,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.6686,
      "step": 53
    },
    {
      "epoch": 0.21873417721518987,
      "grad_norm": 0.5415169789148744,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.6789,
      "step": 54
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.6484662988339179,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.6889,
      "step": 55
    },
    {
      "epoch": 0.22683544303797468,
      "grad_norm": 0.5465724606158754,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.6959,
      "step": 56
    },
    {
      "epoch": 0.23088607594936708,
      "grad_norm": 0.5850420519700699,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.6672,
      "step": 57
    },
    {
      "epoch": 0.23493670886075949,
      "grad_norm": 0.7922879583091695,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.6776,
      "step": 58
    },
    {
      "epoch": 0.2389873417721519,
      "grad_norm": 0.8134985296805135,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.6696,
      "step": 59
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 0.7031140151725898,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.6731,
      "step": 60
    },
    {
      "epoch": 0.2470886075949367,
      "grad_norm": 0.4754470638622413,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.6822,
      "step": 61
    },
    {
      "epoch": 0.25113924050632913,
      "grad_norm": 0.6127230308312909,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.6727,
      "step": 62
    },
    {
      "epoch": 0.25518987341772154,
      "grad_norm": 0.655461466949352,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.6649,
      "step": 63
    },
    {
      "epoch": 0.25924050632911394,
      "grad_norm": 0.7142679376537177,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.671,
      "step": 64
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.6355258786430444,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.658,
      "step": 65
    },
    {
      "epoch": 0.26734177215189875,
      "grad_norm": 0.601739330859932,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.6582,
      "step": 66
    },
    {
      "epoch": 0.27139240506329115,
      "grad_norm": 0.702154219398689,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.6697,
      "step": 67
    },
    {
      "epoch": 0.27544303797468356,
      "grad_norm": 0.7634367175867832,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.6636,
      "step": 68
    },
    {
      "epoch": 0.27949367088607596,
      "grad_norm": 0.9235175913292207,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.6477,
      "step": 69
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 1.0254992259705693,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.6615,
      "step": 70
    },
    {
      "epoch": 0.28759493670886077,
      "grad_norm": 0.7727646963955114,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.6522,
      "step": 71
    },
    {
      "epoch": 0.2916455696202532,
      "grad_norm": 0.7790381555019846,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.6526,
      "step": 72
    },
    {
      "epoch": 0.2956962025316456,
      "grad_norm": 0.6994731480588183,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.6413,
      "step": 73
    },
    {
      "epoch": 0.299746835443038,
      "grad_norm": 0.9156293173170503,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.6484,
      "step": 74
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 1.421362613775901,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.6565,
      "step": 75
    },
    {
      "epoch": 0.3078481012658228,
      "grad_norm": 0.754699546883223,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.6529,
      "step": 76
    },
    {
      "epoch": 0.3118987341772152,
      "grad_norm": 1.1756426852249031,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.6616,
      "step": 77
    },
    {
      "epoch": 0.3159493670886076,
      "grad_norm": 0.9388260925549986,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.648,
      "step": 78
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.9545136533609573,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.6694,
      "step": 79
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 1.302449385376475,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.6519,
      "step": 80
    },
    {
      "epoch": 0.3281012658227848,
      "grad_norm": 0.6623514031959914,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.6616,
      "step": 81
    },
    {
      "epoch": 0.3321518987341772,
      "grad_norm": 0.844931208384013,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.6592,
      "step": 82
    },
    {
      "epoch": 0.3362025316455696,
      "grad_norm": 1.4191222191333228,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.6638,
      "step": 83
    },
    {
      "epoch": 0.34025316455696203,
      "grad_norm": 0.6883700383015497,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.6445,
      "step": 84
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 1.0459717782117925,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.6405,
      "step": 85
    },
    {
      "epoch": 0.34835443037974684,
      "grad_norm": 1.3014935120548765,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.6408,
      "step": 86
    },
    {
      "epoch": 0.35240506329113924,
      "grad_norm": 0.7902832428651478,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.644,
      "step": 87
    },
    {
      "epoch": 0.35645569620253165,
      "grad_norm": 1.1790507455720673,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.6557,
      "step": 88
    },
    {
      "epoch": 0.36050632911392405,
      "grad_norm": 0.9308727298662697,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.6421,
      "step": 89
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 1.3852444679779106,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.6568,
      "step": 90
    },
    {
      "epoch": 0.36860759493670886,
      "grad_norm": 1.083175881998161,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.6548,
      "step": 91
    },
    {
      "epoch": 0.37265822784810126,
      "grad_norm": 1.3023777120228035,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.6309,
      "step": 92
    },
    {
      "epoch": 0.37670886075949367,
      "grad_norm": 1.169229690621438,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.6354,
      "step": 93
    },
    {
      "epoch": 0.3807594936708861,
      "grad_norm": 1.2343495144807062,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.6518,
      "step": 94
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 0.7880681493978746,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.6454,
      "step": 95
    },
    {
      "epoch": 0.3888607594936709,
      "grad_norm": 0.9845266495398576,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.6402,
      "step": 96
    },
    {
      "epoch": 0.3929113924050633,
      "grad_norm": 1.2835785749186486,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.6342,
      "step": 97
    },
    {
      "epoch": 0.3969620253164557,
      "grad_norm": 0.8941179056871833,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.6437,
      "step": 98
    },
    {
      "epoch": 0.4010126582278481,
      "grad_norm": 1.1492862601767255,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.6269,
      "step": 99
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 0.9435850054148426,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.6375,
      "step": 100
    },
    {
      "epoch": 0.4091139240506329,
      "grad_norm": 1.1560716364398882,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.6455,
      "step": 101
    },
    {
      "epoch": 0.4131645569620253,
      "grad_norm": 1.3752429986711991,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.6391,
      "step": 102
    },
    {
      "epoch": 0.4172151898734177,
      "grad_norm": 0.8529747886238581,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.6402,
      "step": 103
    },
    {
      "epoch": 0.4212658227848101,
      "grad_norm": 1.2384909332740734,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.6496,
      "step": 104
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 0.782143387645592,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.6422,
      "step": 105
    },
    {
      "epoch": 0.4293670886075949,
      "grad_norm": 1.2921701082047912,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.6419,
      "step": 106
    },
    {
      "epoch": 0.43341772151898733,
      "grad_norm": 1.0295850101630235,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.6392,
      "step": 107
    },
    {
      "epoch": 0.43746835443037974,
      "grad_norm": 1.2451766619193538,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.6317,
      "step": 108
    },
    {
      "epoch": 0.44151898734177214,
      "grad_norm": 0.759219963665096,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.6377,
      "step": 109
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 0.9160457930109868,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.6284,
      "step": 110
    },
    {
      "epoch": 0.44962025316455695,
      "grad_norm": 1.2762244100941547,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.6314,
      "step": 111
    },
    {
      "epoch": 0.45367088607594935,
      "grad_norm": 0.953814132845107,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.6298,
      "step": 112
    },
    {
      "epoch": 0.45772151898734176,
      "grad_norm": 1.227529178232297,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.6405,
      "step": 113
    },
    {
      "epoch": 0.46177215189873416,
      "grad_norm": 1.3445919689496657,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.6189,
      "step": 114
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 0.6641227292112333,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.6354,
      "step": 115
    },
    {
      "epoch": 0.46987341772151897,
      "grad_norm": 0.7345865382530172,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.643,
      "step": 116
    },
    {
      "epoch": 0.4739240506329114,
      "grad_norm": 1.2673684968122978,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.6377,
      "step": 117
    },
    {
      "epoch": 0.4779746835443038,
      "grad_norm": 1.0220561501182392,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.6212,
      "step": 118
    },
    {
      "epoch": 0.4820253164556962,
      "grad_norm": 1.6078379451949094,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.6239,
      "step": 119
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 0.6652128409014539,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.6441,
      "step": 120
    },
    {
      "epoch": 0.490126582278481,
      "grad_norm": 1.192603228608864,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.6322,
      "step": 121
    },
    {
      "epoch": 0.4941772151898734,
      "grad_norm": 1.2787886383147804,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.6317,
      "step": 122
    },
    {
      "epoch": 0.4982278481012658,
      "grad_norm": 0.7229243690355931,
      "learning_rate": 4e-05,
      "loss": 0.6294,
      "step": 123
    },
    {
      "epoch": 0.5022784810126583,
      "grad_norm": 1.0248057535736221,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.6349,
      "step": 124
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 1.525586054482558,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.6275,
      "step": 125
    },
    {
      "epoch": 0.5103797468354431,
      "grad_norm": 0.8882333381653731,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.6234,
      "step": 126
    },
    {
      "epoch": 0.5144303797468355,
      "grad_norm": 2.1241713787889,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.6555,
      "step": 127
    },
    {
      "epoch": 0.5184810126582279,
      "grad_norm": 1.3919506047718353,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.6323,
      "step": 128
    },
    {
      "epoch": 0.5225316455696203,
      "grad_norm": 1.6732908651424134,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.6277,
      "step": 129
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 1.3101489734815353,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.638,
      "step": 130
    },
    {
      "epoch": 0.5306329113924051,
      "grad_norm": 1.6015574611565624,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.6357,
      "step": 131
    },
    {
      "epoch": 0.5346835443037975,
      "grad_norm": 1.0127353201103801,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.6203,
      "step": 132
    },
    {
      "epoch": 0.5387341772151899,
      "grad_norm": 1.5568024472012802,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.6343,
      "step": 133
    },
    {
      "epoch": 0.5427848101265823,
      "grad_norm": 1.169633471584172,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.6388,
      "step": 134
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 1.5252329091515757,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.6345,
      "step": 135
    },
    {
      "epoch": 0.5508860759493671,
      "grad_norm": 1.3444595017666914,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.6298,
      "step": 136
    },
    {
      "epoch": 0.5549367088607595,
      "grad_norm": 1.1443605082950221,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.6272,
      "step": 137
    },
    {
      "epoch": 0.5589873417721519,
      "grad_norm": 1.0079361454341442,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.6425,
      "step": 138
    },
    {
      "epoch": 0.5630379746835443,
      "grad_norm": 1.0029542686653323,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.6249,
      "step": 139
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 1.015564295707624,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.6311,
      "step": 140
    },
    {
      "epoch": 0.5711392405063291,
      "grad_norm": 1.2557549600983018,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.6287,
      "step": 141
    },
    {
      "epoch": 0.5751898734177215,
      "grad_norm": 0.7468464286390285,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.6159,
      "step": 142
    },
    {
      "epoch": 0.579240506329114,
      "grad_norm": 1.242760939558488,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.6276,
      "step": 143
    },
    {
      "epoch": 0.5832911392405064,
      "grad_norm": 0.8876839140869978,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.6253,
      "step": 144
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 1.142462004782762,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.609,
      "step": 145
    },
    {
      "epoch": 0.5913924050632912,
      "grad_norm": 0.956802960437125,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.6155,
      "step": 146
    },
    {
      "epoch": 0.5954430379746836,
      "grad_norm": 0.8216663909823639,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.6099,
      "step": 147
    },
    {
      "epoch": 0.599493670886076,
      "grad_norm": 0.843324655269676,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.6204,
      "step": 148
    },
    {
      "epoch": 0.6035443037974684,
      "grad_norm": 0.7837263915872599,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.6344,
      "step": 149
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.9211042586175315,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.61,
      "step": 150
    },
    {
      "epoch": 0.6116455696202532,
      "grad_norm": 0.852038263104514,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.6291,
      "step": 151
    },
    {
      "epoch": 0.6156962025316456,
      "grad_norm": 0.6083963620395382,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.6354,
      "step": 152
    },
    {
      "epoch": 0.619746835443038,
      "grad_norm": 0.8495674708153879,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.6033,
      "step": 153
    },
    {
      "epoch": 0.6237974683544304,
      "grad_norm": 0.9448267933712936,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.6162,
      "step": 154
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 0.8429402834252556,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.6031,
      "step": 155
    },
    {
      "epoch": 0.6318987341772152,
      "grad_norm": 0.8990977661755324,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.6194,
      "step": 156
    },
    {
      "epoch": 0.6359493670886076,
      "grad_norm": 0.6268515530458345,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.6103,
      "step": 157
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.5594599301744696,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.6214,
      "step": 158
    },
    {
      "epoch": 0.6440506329113924,
      "grad_norm": 0.7037342719961769,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.6086,
      "step": 159
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 0.8074926517981739,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.6026,
      "step": 160
    },
    {
      "epoch": 0.6521518987341772,
      "grad_norm": 0.6177679520555956,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.6112,
      "step": 161
    },
    {
      "epoch": 0.6562025316455696,
      "grad_norm": 0.5657529954730145,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.605,
      "step": 162
    },
    {
      "epoch": 0.660253164556962,
      "grad_norm": 0.6813629216776935,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.6292,
      "step": 163
    },
    {
      "epoch": 0.6643037974683544,
      "grad_norm": 0.8974269735246865,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.6105,
      "step": 164
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.8403900958702467,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.6221,
      "step": 165
    },
    {
      "epoch": 0.6724050632911392,
      "grad_norm": 0.6698290422618715,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.6307,
      "step": 166
    },
    {
      "epoch": 0.6764556962025317,
      "grad_norm": 0.5509832314339451,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.6274,
      "step": 167
    },
    {
      "epoch": 0.6805063291139241,
      "grad_norm": 0.5525040237617902,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.6203,
      "step": 168
    },
    {
      "epoch": 0.6845569620253165,
      "grad_norm": 0.836421107896438,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.6263,
      "step": 169
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 0.9552015114391759,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.5933,
      "step": 170
    },
    {
      "epoch": 0.6926582278481013,
      "grad_norm": 0.5322688550453594,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.6054,
      "step": 171
    },
    {
      "epoch": 0.6967088607594937,
      "grad_norm": 0.6299715565354982,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.6022,
      "step": 172
    },
    {
      "epoch": 0.7007594936708861,
      "grad_norm": 0.7014072892057753,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.6179,
      "step": 173
    },
    {
      "epoch": 0.7048101265822785,
      "grad_norm": 0.6313198783319353,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.6036,
      "step": 174
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 0.4828474126073063,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.6213,
      "step": 175
    },
    {
      "epoch": 0.7129113924050633,
      "grad_norm": 0.5221173010104053,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.6137,
      "step": 176
    },
    {
      "epoch": 0.7169620253164557,
      "grad_norm": 0.5619097443174723,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.6057,
      "step": 177
    },
    {
      "epoch": 0.7210126582278481,
      "grad_norm": 0.48458250268714326,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.6078,
      "step": 178
    },
    {
      "epoch": 0.7250632911392405,
      "grad_norm": 0.5938847632181161,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.6154,
      "step": 179
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.8573099668344029,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.6058,
      "step": 180
    },
    {
      "epoch": 0.7331645569620253,
      "grad_norm": 0.9887704327797934,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.6076,
      "step": 181
    },
    {
      "epoch": 0.7372151898734177,
      "grad_norm": 0.8686374962177015,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.6217,
      "step": 182
    },
    {
      "epoch": 0.7412658227848101,
      "grad_norm": 0.7109343949802196,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.6035,
      "step": 183
    },
    {
      "epoch": 0.7453164556962025,
      "grad_norm": 0.493792875999254,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.6179,
      "step": 184
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 0.6295602586739065,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.6,
      "step": 185
    },
    {
      "epoch": 0.7534177215189873,
      "grad_norm": 0.5993221419342887,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.6059,
      "step": 186
    },
    {
      "epoch": 0.7574683544303797,
      "grad_norm": 0.6640111641033765,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.606,
      "step": 187
    },
    {
      "epoch": 0.7615189873417721,
      "grad_norm": 0.5340855976513834,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.61,
      "step": 188
    },
    {
      "epoch": 0.7655696202531646,
      "grad_norm": 0.5094307398305351,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.6088,
      "step": 189
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 0.5222761909192823,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.6177,
      "step": 190
    },
    {
      "epoch": 0.7736708860759494,
      "grad_norm": 0.5192079744187618,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.6134,
      "step": 191
    },
    {
      "epoch": 0.7777215189873418,
      "grad_norm": 0.528368948026275,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.5897,
      "step": 192
    },
    {
      "epoch": 0.7817721518987342,
      "grad_norm": 0.4875660748536816,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.6054,
      "step": 193
    },
    {
      "epoch": 0.7858227848101266,
      "grad_norm": 0.4405163656576639,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.6059,
      "step": 194
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.5839220175599222,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.599,
      "step": 195
    },
    {
      "epoch": 0.7939240506329114,
      "grad_norm": 0.5312412693211577,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.6185,
      "step": 196
    },
    {
      "epoch": 0.7979746835443038,
      "grad_norm": 0.4262982798701605,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.6013,
      "step": 197
    },
    {
      "epoch": 0.8020253164556962,
      "grad_norm": 0.6199389475787582,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.6,
      "step": 198
    },
    {
      "epoch": 0.8060759493670886,
      "grad_norm": 0.7139097282232219,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.6053,
      "step": 199
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.5962012783773114,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.6258,
      "step": 200
    },
    {
      "epoch": 0.8141772151898734,
      "grad_norm": 0.588874409693904,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.601,
      "step": 201
    },
    {
      "epoch": 0.8182278481012658,
      "grad_norm": 0.5285767735454469,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.6069,
      "step": 202
    },
    {
      "epoch": 0.8222784810126582,
      "grad_norm": 0.4613117720774959,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.6083,
      "step": 203
    },
    {
      "epoch": 0.8263291139240506,
      "grad_norm": 0.44568915286136745,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.5986,
      "step": 204
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 0.49643346478004824,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.6118,
      "step": 205
    },
    {
      "epoch": 0.8344303797468354,
      "grad_norm": 0.48199145601359433,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.6055,
      "step": 206
    },
    {
      "epoch": 0.8384810126582278,
      "grad_norm": 0.4385394919190343,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.6029,
      "step": 207
    },
    {
      "epoch": 0.8425316455696202,
      "grad_norm": 0.5227377418078802,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.6123,
      "step": 208
    },
    {
      "epoch": 0.8465822784810126,
      "grad_norm": 0.5557498184432195,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.5893,
      "step": 209
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.47087717307213717,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.5982,
      "step": 210
    },
    {
      "epoch": 0.8546835443037974,
      "grad_norm": 0.6724579733407169,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.5992,
      "step": 211
    },
    {
      "epoch": 0.8587341772151899,
      "grad_norm": 0.6020203634243056,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.5989,
      "step": 212
    },
    {
      "epoch": 0.8627848101265823,
      "grad_norm": 0.5707962035272884,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.6123,
      "step": 213
    },
    {
      "epoch": 0.8668354430379747,
      "grad_norm": 0.5821269609930904,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.5991,
      "step": 214
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 0.6422558154108303,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.5926,
      "step": 215
    },
    {
      "epoch": 0.8749367088607595,
      "grad_norm": 0.5564434387251845,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.6186,
      "step": 216
    },
    {
      "epoch": 0.8789873417721519,
      "grad_norm": 0.49360865756263667,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.5951,
      "step": 217
    },
    {
      "epoch": 0.8830379746835443,
      "grad_norm": 0.6371197132961622,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.5998,
      "step": 218
    },
    {
      "epoch": 0.8870886075949367,
      "grad_norm": 0.6402339583511993,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.5996,
      "step": 219
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 0.5484042182270151,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.597,
      "step": 220
    },
    {
      "epoch": 0.8951898734177215,
      "grad_norm": 0.6337938041969547,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.5958,
      "step": 221
    },
    {
      "epoch": 0.8992405063291139,
      "grad_norm": 0.5434746613682024,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.5926,
      "step": 222
    },
    {
      "epoch": 0.9032911392405063,
      "grad_norm": 0.39940339370234795,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.594,
      "step": 223
    },
    {
      "epoch": 0.9073417721518987,
      "grad_norm": 0.46933456129664114,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.5936,
      "step": 224
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.5432580453018409,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.5997,
      "step": 225
    },
    {
      "epoch": 0.9154430379746835,
      "grad_norm": 0.5039426677924892,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.5983,
      "step": 226
    },
    {
      "epoch": 0.9194936708860759,
      "grad_norm": 0.5026347462058494,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.6007,
      "step": 227
    },
    {
      "epoch": 0.9235443037974683,
      "grad_norm": 0.6239700304616167,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.5975,
      "step": 228
    },
    {
      "epoch": 0.9275949367088607,
      "grad_norm": 0.7157505625944839,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.6019,
      "step": 229
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 0.4652247278302937,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.5904,
      "step": 230
    },
    {
      "epoch": 0.9356962025316455,
      "grad_norm": 0.6552376372616936,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.5943,
      "step": 231
    },
    {
      "epoch": 0.9397468354430379,
      "grad_norm": 0.7142902168970736,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.6188,
      "step": 232
    },
    {
      "epoch": 0.9437974683544303,
      "grad_norm": 0.614359786592411,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.5995,
      "step": 233
    },
    {
      "epoch": 0.9478481012658228,
      "grad_norm": 0.5890522049738569,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.6062,
      "step": 234
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 0.5689810380195013,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.6001,
      "step": 235
    },
    {
      "epoch": 0.9559493670886076,
      "grad_norm": 0.4291509046067673,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.5898,
      "step": 236
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.5121008007374366,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.6036,
      "step": 237
    },
    {
      "epoch": 0.9640506329113924,
      "grad_norm": 0.4813346213177489,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.5997,
      "step": 238
    },
    {
      "epoch": 0.9681012658227848,
      "grad_norm": 0.4793607764038437,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.5959,
      "step": 239
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.4478139598266485,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.5923,
      "step": 240
    },
    {
      "epoch": 0.9762025316455696,
      "grad_norm": 0.433162071207074,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.5936,
      "step": 241
    },
    {
      "epoch": 0.980253164556962,
      "grad_norm": 0.5729835230052921,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.5946,
      "step": 242
    },
    {
      "epoch": 0.9843037974683544,
      "grad_norm": 0.4347336495590074,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.5929,
      "step": 243
    },
    {
      "epoch": 0.9883544303797468,
      "grad_norm": 0.5976960746396264,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.5933,
      "step": 244
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 0.5338444592333565,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.5885,
      "step": 245
    },
    {
      "epoch": 0.9964556962025316,
      "grad_norm": 0.5303643665077618,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.5995,
      "step": 246
    },
    {
      "epoch": 1.000506329113924,
      "grad_norm": 0.5500071608632071,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.587,
      "step": 247
    },
    {
      "epoch": 1.0045569620253165,
      "grad_norm": 0.47554915240229956,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.5514,
      "step": 248
    },
    {
      "epoch": 1.0086075949367088,
      "grad_norm": 0.4992773099624107,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.5638,
      "step": 249
    },
    {
      "epoch": 1.0126582278481013,
      "grad_norm": 0.6072353660091911,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.5534,
      "step": 250
    },
    {
      "epoch": 1.0167088607594936,
      "grad_norm": 0.5181358091193021,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.5529,
      "step": 251
    },
    {
      "epoch": 1.0207594936708861,
      "grad_norm": 0.48118018117331696,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.5509,
      "step": 252
    },
    {
      "epoch": 1.0248101265822784,
      "grad_norm": 0.5357875757629944,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.5602,
      "step": 253
    },
    {
      "epoch": 1.028860759493671,
      "grad_norm": 0.6024913819748591,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.5595,
      "step": 254
    },
    {
      "epoch": 1.0329113924050632,
      "grad_norm": 0.5122244261222243,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.5646,
      "step": 255
    },
    {
      "epoch": 1.0369620253164558,
      "grad_norm": 0.4746427479000092,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.5392,
      "step": 256
    },
    {
      "epoch": 1.041012658227848,
      "grad_norm": 0.4273815868751707,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.5562,
      "step": 257
    },
    {
      "epoch": 1.0450632911392406,
      "grad_norm": 0.4241982358340587,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.5483,
      "step": 258
    },
    {
      "epoch": 1.0491139240506329,
      "grad_norm": 0.42810644286613164,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.5572,
      "step": 259
    },
    {
      "epoch": 1.0531645569620254,
      "grad_norm": 0.4866573211882829,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.5514,
      "step": 260
    },
    {
      "epoch": 1.0572151898734177,
      "grad_norm": 0.7019981598437524,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.5434,
      "step": 261
    },
    {
      "epoch": 1.0612658227848102,
      "grad_norm": 0.6509258776076047,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.5269,
      "step": 262
    },
    {
      "epoch": 1.0653164556962025,
      "grad_norm": 0.47697440269419494,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.5577,
      "step": 263
    },
    {
      "epoch": 1.069367088607595,
      "grad_norm": 0.629370595703202,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.5521,
      "step": 264
    },
    {
      "epoch": 1.0734177215189873,
      "grad_norm": 0.4709554820325284,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.5525,
      "step": 265
    },
    {
      "epoch": 1.0774683544303798,
      "grad_norm": 0.46024366360545865,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.5513,
      "step": 266
    },
    {
      "epoch": 1.081518987341772,
      "grad_norm": 0.4829546452797468,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.5455,
      "step": 267
    },
    {
      "epoch": 1.0855696202531646,
      "grad_norm": 0.49588600155937496,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.5425,
      "step": 268
    },
    {
      "epoch": 1.089620253164557,
      "grad_norm": 0.45222460171716566,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.5542,
      "step": 269
    },
    {
      "epoch": 1.0936708860759494,
      "grad_norm": 0.48141903592195834,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.5569,
      "step": 270
    },
    {
      "epoch": 1.0977215189873417,
      "grad_norm": 0.45639846540679607,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.5518,
      "step": 271
    },
    {
      "epoch": 1.1017721518987342,
      "grad_norm": 0.5150877153095482,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.5645,
      "step": 272
    },
    {
      "epoch": 1.1058227848101265,
      "grad_norm": 0.47269601228990676,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.5416,
      "step": 273
    },
    {
      "epoch": 1.109873417721519,
      "grad_norm": 0.45054706027822283,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.5452,
      "step": 274
    },
    {
      "epoch": 1.1139240506329113,
      "grad_norm": 0.4288542529602165,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.5576,
      "step": 275
    },
    {
      "epoch": 1.1179746835443038,
      "grad_norm": 0.47553750754751306,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.5372,
      "step": 276
    },
    {
      "epoch": 1.1220253164556961,
      "grad_norm": 0.45912063598866276,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.5568,
      "step": 277
    },
    {
      "epoch": 1.1260759493670887,
      "grad_norm": 0.533218488391862,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.5468,
      "step": 278
    },
    {
      "epoch": 1.130126582278481,
      "grad_norm": 0.5340660442916709,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.5302,
      "step": 279
    },
    {
      "epoch": 1.1341772151898735,
      "grad_norm": 0.5205756484859887,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.5394,
      "step": 280
    },
    {
      "epoch": 1.1382278481012658,
      "grad_norm": 0.6326794284713414,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.5516,
      "step": 281
    },
    {
      "epoch": 1.1422784810126583,
      "grad_norm": 0.5752748780382161,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.5468,
      "step": 282
    },
    {
      "epoch": 1.1463291139240506,
      "grad_norm": 0.7100075144875339,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.5583,
      "step": 283
    },
    {
      "epoch": 1.150379746835443,
      "grad_norm": 0.5194528593048473,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.5551,
      "step": 284
    },
    {
      "epoch": 1.1544303797468354,
      "grad_norm": 0.4495828385334631,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.5437,
      "step": 285
    },
    {
      "epoch": 1.158481012658228,
      "grad_norm": 0.5473781759804024,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.5444,
      "step": 286
    },
    {
      "epoch": 1.1625316455696202,
      "grad_norm": 0.571803502668593,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.5612,
      "step": 287
    },
    {
      "epoch": 1.1665822784810127,
      "grad_norm": 0.6358882536684146,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.5419,
      "step": 288
    },
    {
      "epoch": 1.170632911392405,
      "grad_norm": 0.5690157108797107,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.547,
      "step": 289
    },
    {
      "epoch": 1.1746835443037975,
      "grad_norm": 0.49333548405763644,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.5506,
      "step": 290
    },
    {
      "epoch": 1.1787341772151898,
      "grad_norm": 0.5824239573734334,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.5367,
      "step": 291
    },
    {
      "epoch": 1.1827848101265823,
      "grad_norm": 0.6874573712018205,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.5489,
      "step": 292
    },
    {
      "epoch": 1.1868354430379746,
      "grad_norm": 0.5331898343471256,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.554,
      "step": 293
    },
    {
      "epoch": 1.1908860759493671,
      "grad_norm": 0.45159237925798407,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.5447,
      "step": 294
    },
    {
      "epoch": 1.1949367088607594,
      "grad_norm": 0.46653595802228726,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.5377,
      "step": 295
    },
    {
      "epoch": 1.198987341772152,
      "grad_norm": 0.6102207124766773,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.566,
      "step": 296
    },
    {
      "epoch": 1.2030379746835442,
      "grad_norm": 0.559336763542953,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.5644,
      "step": 297
    },
    {
      "epoch": 1.2070886075949367,
      "grad_norm": 0.4237326266656369,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.5484,
      "step": 298
    },
    {
      "epoch": 1.211139240506329,
      "grad_norm": 0.4295195248427764,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.5331,
      "step": 299
    },
    {
      "epoch": 1.2151898734177216,
      "grad_norm": 0.5285807695162228,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.5554,
      "step": 300
    },
    {
      "epoch": 1.2192405063291138,
      "grad_norm": 0.5789596902987457,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.5446,
      "step": 301
    },
    {
      "epoch": 1.2232911392405064,
      "grad_norm": 0.4545036984330519,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.5576,
      "step": 302
    },
    {
      "epoch": 1.2273417721518987,
      "grad_norm": 0.5013885182345832,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.5588,
      "step": 303
    },
    {
      "epoch": 1.2313924050632912,
      "grad_norm": 0.6072046752643824,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.5639,
      "step": 304
    },
    {
      "epoch": 1.2354430379746835,
      "grad_norm": 0.5815730659431051,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.5526,
      "step": 305
    },
    {
      "epoch": 1.239493670886076,
      "grad_norm": 0.5305379835506069,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.5411,
      "step": 306
    },
    {
      "epoch": 1.2435443037974683,
      "grad_norm": 0.48569343206796184,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.5389,
      "step": 307
    },
    {
      "epoch": 1.2475949367088608,
      "grad_norm": 0.584739034141005,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.5325,
      "step": 308
    },
    {
      "epoch": 1.251645569620253,
      "grad_norm": 0.4193077559961448,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.5504,
      "step": 309
    },
    {
      "epoch": 1.2556962025316456,
      "grad_norm": 0.5526901611302676,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.5495,
      "step": 310
    },
    {
      "epoch": 1.259746835443038,
      "grad_norm": 0.7133154742039458,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.5524,
      "step": 311
    },
    {
      "epoch": 1.2637974683544304,
      "grad_norm": 0.48721207071453104,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.5457,
      "step": 312
    },
    {
      "epoch": 1.2678481012658227,
      "grad_norm": 0.45461213715281523,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.5393,
      "step": 313
    },
    {
      "epoch": 1.2718987341772152,
      "grad_norm": 0.5106805479818406,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.5391,
      "step": 314
    },
    {
      "epoch": 1.2759493670886077,
      "grad_norm": 0.4140392259424703,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.5398,
      "step": 315
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.4164917853050876,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.5656,
      "step": 316
    },
    {
      "epoch": 1.2840506329113923,
      "grad_norm": 0.4682037370853518,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.5398,
      "step": 317
    },
    {
      "epoch": 1.2881012658227848,
      "grad_norm": 0.4602566288065545,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.5461,
      "step": 318
    },
    {
      "epoch": 1.2921518987341774,
      "grad_norm": 0.40355892984393266,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.5424,
      "step": 319
    },
    {
      "epoch": 1.2962025316455696,
      "grad_norm": 0.5030157465478555,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.5527,
      "step": 320
    },
    {
      "epoch": 1.300253164556962,
      "grad_norm": 0.4318964729187145,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.5497,
      "step": 321
    },
    {
      "epoch": 1.3043037974683545,
      "grad_norm": 0.394028927160198,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.5325,
      "step": 322
    },
    {
      "epoch": 1.308354430379747,
      "grad_norm": 0.42954927562396006,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.5574,
      "step": 323
    },
    {
      "epoch": 1.3124050632911393,
      "grad_norm": 0.4526565447881357,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.5536,
      "step": 324
    },
    {
      "epoch": 1.3164556962025316,
      "grad_norm": 0.4779515164221075,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.5328,
      "step": 325
    },
    {
      "epoch": 1.320506329113924,
      "grad_norm": 0.40347027199928304,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.5393,
      "step": 326
    },
    {
      "epoch": 1.3245569620253166,
      "grad_norm": 0.5572561392412894,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.5522,
      "step": 327
    },
    {
      "epoch": 1.3286075949367089,
      "grad_norm": 0.40317352917455396,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.5533,
      "step": 328
    },
    {
      "epoch": 1.3326582278481012,
      "grad_norm": 0.5678906492158503,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.5548,
      "step": 329
    },
    {
      "epoch": 1.3367088607594937,
      "grad_norm": 0.47921280991988013,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.5458,
      "step": 330
    },
    {
      "epoch": 1.3407594936708862,
      "grad_norm": 0.5123511240169487,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.5514,
      "step": 331
    },
    {
      "epoch": 1.3448101265822785,
      "grad_norm": 0.4020620349725275,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.5523,
      "step": 332
    },
    {
      "epoch": 1.3488607594936708,
      "grad_norm": 0.47002920021294187,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.5496,
      "step": 333
    },
    {
      "epoch": 1.3529113924050633,
      "grad_norm": 0.5040830445834008,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.5469,
      "step": 334
    },
    {
      "epoch": 1.3569620253164558,
      "grad_norm": 0.4230989726236193,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.5432,
      "step": 335
    },
    {
      "epoch": 1.3610126582278481,
      "grad_norm": 0.4285348583010353,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.5492,
      "step": 336
    },
    {
      "epoch": 1.3650632911392404,
      "grad_norm": 0.47567361635291877,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.5472,
      "step": 337
    },
    {
      "epoch": 1.369113924050633,
      "grad_norm": 0.5605263023128432,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.5519,
      "step": 338
    },
    {
      "epoch": 1.3731645569620254,
      "grad_norm": 0.37552053212175884,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.554,
      "step": 339
    },
    {
      "epoch": 1.3772151898734177,
      "grad_norm": 0.4997050340494049,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.5449,
      "step": 340
    },
    {
      "epoch": 1.38126582278481,
      "grad_norm": 0.5562556711374784,
| "learning_rate": 3.629301541734311e-05, | |
| "loss": 0.5362, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.3853164556962025, | |
| "grad_norm": 0.48991140891773255, | |
| "learning_rate": 3.626003255349676e-05, | |
| "loss": 0.5538, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.389367088607595, | |
| "grad_norm": 0.4561967022339903, | |
| "learning_rate": 3.622691873358357e-05, | |
| "loss": 0.5471, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.3934177215189874, | |
| "grad_norm": 0.3954695649456603, | |
| "learning_rate": 3.61936742242977e-05, | |
| "loss": 0.5402, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.3974683544303796, | |
| "grad_norm": 0.47077689118293536, | |
| "learning_rate": 3.6160299293385864e-05, | |
| "loss": 0.5476, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.4015189873417722, | |
| "grad_norm": 0.36429771524009263, | |
| "learning_rate": 3.612679420964516e-05, | |
| "loss": 0.5545, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.4055696202531647, | |
| "grad_norm": 0.4253212657113402, | |
| "learning_rate": 3.609315924292092e-05, | |
| "loss": 0.551, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.409620253164557, | |
| "grad_norm": 0.39418758899046846, | |
| "learning_rate": 3.6059394664104554e-05, | |
| "loss": 0.544, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.4136708860759493, | |
| "grad_norm": 0.441803094431973, | |
| "learning_rate": 3.602550074513133e-05, | |
| "loss": 0.5428, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.4177215189873418, | |
| "grad_norm": 0.4399055634781674, | |
| "learning_rate": 3.599147775897822e-05, | |
| "loss": 0.5476, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.4217721518987343, | |
| "grad_norm": 0.4458150877037388, | |
| "learning_rate": 3.595732597966167e-05, | |
| "loss": 0.5414, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.4258227848101266, | |
| "grad_norm": 0.44899552827415684, | |
| "learning_rate": 3.592304568223542e-05, | |
| "loss": 0.5432, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.4298734177215189, | |
| "grad_norm": 0.35170470372855966, | |
| "learning_rate": 3.588863714278826e-05, | |
| "loss": 0.5441, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.4339240506329114, | |
| "grad_norm": 0.3987402435012038, | |
| "learning_rate": 3.585410063844186e-05, | |
| "loss": 0.5455, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.437974683544304, | |
| "grad_norm": 0.4610527746655635, | |
| "learning_rate": 3.581943644734846e-05, | |
| "loss": 0.543, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.4420253164556962, | |
| "grad_norm": 0.3820155227887942, | |
| "learning_rate": 3.578464484868869e-05, | |
| "loss": 0.5431, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.4460759493670885, | |
| "grad_norm": 0.40538970543505604, | |
| "learning_rate": 3.5749726122669316e-05, | |
| "loss": 0.5483, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.450126582278481, | |
| "grad_norm": 0.4941609680886557, | |
| "learning_rate": 3.5714680550520943e-05, | |
| "loss": 0.5434, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.4541772151898735, | |
| "grad_norm": 0.541997276032994, | |
| "learning_rate": 3.5679508414495794e-05, | |
| "loss": 0.5477, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.4582278481012658, | |
| "grad_norm": 0.5642857519590568, | |
| "learning_rate": 3.564420999786543e-05, | |
| "loss": 0.5453, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.4622784810126581, | |
| "grad_norm": 0.5323601713654209, | |
| "learning_rate": 3.560878558491842e-05, | |
| "loss": 0.5545, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.4663291139240506, | |
| "grad_norm": 0.5159977383844734, | |
| "learning_rate": 3.5573235460958145e-05, | |
| "loss": 0.5365, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.4703797468354431, | |
| "grad_norm": 0.4427609450451662, | |
| "learning_rate": 3.553755991230039e-05, | |
| "loss": 0.5473, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.4744303797468354, | |
| "grad_norm": 0.47963303821292386, | |
| "learning_rate": 3.5501759226271144e-05, | |
| "loss": 0.5437, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.4784810126582277, | |
| "grad_norm": 0.3692107078743929, | |
| "learning_rate": 3.546583369120419e-05, | |
| "loss": 0.5412, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.4825316455696202, | |
| "grad_norm": 0.4726027151812385, | |
| "learning_rate": 3.5429783596438864e-05, | |
| "loss": 0.5487, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.4865822784810128, | |
| "grad_norm": 0.5477158914701177, | |
| "learning_rate": 3.539360923231766e-05, | |
| "loss": 0.5495, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.490632911392405, | |
| "grad_norm": 0.5112201820786759, | |
| "learning_rate": 3.535731089018394e-05, | |
| "loss": 0.5325, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.4946835443037974, | |
| "grad_norm": 0.41235035484151883, | |
| "learning_rate": 3.532088886237956e-05, | |
| "loss": 0.5264, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.4987341772151899, | |
| "grad_norm": 0.5601143157981107, | |
| "learning_rate": 3.528434344224253e-05, | |
| "loss": 0.5418, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.5027848101265824, | |
| "grad_norm": 0.46037913089000987, | |
| "learning_rate": 3.524767492410464e-05, | |
| "loss": 0.5426, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.5068354430379747, | |
| "grad_norm": 0.42209428923513703, | |
| "learning_rate": 3.521088360328908e-05, | |
| "loss": 0.5526, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.510886075949367, | |
| "grad_norm": 0.411234079680589, | |
| "learning_rate": 3.517396977610811e-05, | |
| "loss": 0.5401, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.5149367088607595, | |
| "grad_norm": 0.47642025260768994, | |
| "learning_rate": 3.5136933739860595e-05, | |
| "loss": 0.5466, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.518987341772152, | |
| "grad_norm": 0.5099760316987239, | |
| "learning_rate": 3.509977579282971e-05, | |
| "loss": 0.5442, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.5230379746835443, | |
| "grad_norm": 0.40371040377670553, | |
| "learning_rate": 3.5062496234280424e-05, | |
| "loss": 0.5477, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.5270886075949366, | |
| "grad_norm": 0.5062065127252333, | |
| "learning_rate": 3.502509536445719e-05, | |
| "loss": 0.5538, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.531139240506329, | |
| "grad_norm": 0.4234150075360343, | |
| "learning_rate": 3.498757348458147e-05, | |
| "loss": 0.5421, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.5351898734177216, | |
| "grad_norm": 0.38730022641140516, | |
| "learning_rate": 3.4949930896849324e-05, | |
| "loss": 0.5451, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.539240506329114, | |
| "grad_norm": 0.33241443525402675, | |
| "learning_rate": 3.491216790442899e-05, | |
| "loss": 0.546, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.5432911392405062, | |
| "grad_norm": 0.4284241369469206, | |
| "learning_rate": 3.487428481145839e-05, | |
| "loss": 0.5548, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.5473417721518987, | |
| "grad_norm": 0.37376058916670324, | |
| "learning_rate": 3.483628192304278e-05, | |
| "loss": 0.5359, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.5513924050632912, | |
| "grad_norm": 0.36837594114970984, | |
| "learning_rate": 3.479815954525219e-05, | |
| "loss": 0.5427, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.5554430379746835, | |
| "grad_norm": 0.3879312011678323, | |
| "learning_rate": 3.475991798511899e-05, | |
| "loss": 0.5438, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.5594936708860758, | |
| "grad_norm": 0.39649417439881224, | |
| "learning_rate": 3.4721557550635464e-05, | |
| "loss": 0.5539, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.5635443037974683, | |
| "grad_norm": 0.35506664906645824, | |
| "learning_rate": 3.468307855075128e-05, | |
| "loss": 0.5501, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.5675949367088609, | |
| "grad_norm": 0.4174460997170703, | |
| "learning_rate": 3.4644481295371005e-05, | |
| "loss": 0.5363, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.5716455696202531, | |
| "grad_norm": 0.32226208469568524, | |
| "learning_rate": 3.460576609535163e-05, | |
| "loss": 0.5593, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.5756962025316454, | |
| "grad_norm": 0.4045986822087714, | |
| "learning_rate": 3.456693326250006e-05, | |
| "loss": 0.5378, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.579746835443038, | |
| "grad_norm": 0.32051097916176075, | |
| "learning_rate": 3.452798310957058e-05, | |
| "loss": 0.5536, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.5837974683544305, | |
| "grad_norm": 0.33587806002092474, | |
| "learning_rate": 3.4488915950262386e-05, | |
| "loss": 0.5454, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.5878481012658228, | |
| "grad_norm": 0.3283821046323556, | |
| "learning_rate": 3.4449732099216985e-05, | |
| "loss": 0.5382, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.591898734177215, | |
| "grad_norm": 0.3775819330210603, | |
| "learning_rate": 3.441043187201574e-05, | |
| "loss": 0.5383, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.5959493670886076, | |
| "grad_norm": 0.4462507037045206, | |
| "learning_rate": 3.437101558517728e-05, | |
| "loss": 0.5461, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.39146848476142787, | |
| "learning_rate": 3.433148355615496e-05, | |
| "loss": 0.539, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.6040506329113924, | |
| "grad_norm": 0.44343126791856813, | |
| "learning_rate": 3.4291836103334294e-05, | |
| "loss": 0.5446, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.6081012658227847, | |
| "grad_norm": 0.41580431921978134, | |
| "learning_rate": 3.425207354603043e-05, | |
| "loss": 0.5418, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.6121518987341772, | |
| "grad_norm": 0.34714839307359463, | |
| "learning_rate": 3.421219620448553e-05, | |
| "loss": 0.5305, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.6162025316455697, | |
| "grad_norm": 0.36359874379967816, | |
| "learning_rate": 3.417220439986623e-05, | |
| "loss": 0.5449, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.620253164556962, | |
| "grad_norm": 0.358430464566614, | |
| "learning_rate": 3.4132098454261024e-05, | |
| "loss": 0.554, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.6243037974683543, | |
| "grad_norm": 0.40237926786474026, | |
| "learning_rate": 3.4091878690677676e-05, | |
| "loss": 0.5484, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.6283544303797468, | |
| "grad_norm": 0.3737579658803788, | |
| "learning_rate": 3.405154543304065e-05, | |
| "loss": 0.5404, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.6324050632911393, | |
| "grad_norm": 0.3599967872924873, | |
| "learning_rate": 3.401109900618843e-05, | |
| "loss": 0.5455, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.6364556962025316, | |
| "grad_norm": 0.3530018194588855, | |
| "learning_rate": 3.3970539735870996e-05, | |
| "loss": 0.545, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.640506329113924, | |
| "grad_norm": 0.2814475653399262, | |
| "learning_rate": 3.392986794874714e-05, | |
| "loss": 0.5355, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.6445569620253164, | |
| "grad_norm": 0.3190457686061442, | |
| "learning_rate": 3.388908397238184e-05, | |
| "loss": 0.5284, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.648607594936709, | |
| "grad_norm": 0.30743908814888693, | |
| "learning_rate": 3.384818813524362e-05, | |
| "loss": 0.5409, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.6526582278481012, | |
| "grad_norm": 0.34052422733021664, | |
| "learning_rate": 3.380718076670195e-05, | |
| "loss": 0.543, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.6567088607594935, | |
| "grad_norm": 0.33979173186098643, | |
| "learning_rate": 3.376606219702454e-05, | |
| "loss": 0.5369, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.660759493670886, | |
| "grad_norm": 0.3504167763147821, | |
| "learning_rate": 3.372483275737468e-05, | |
| "loss": 0.5496, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.6648101265822786, | |
| "grad_norm": 0.3593326852304351, | |
| "learning_rate": 3.368349277980861e-05, | |
| "loss": 0.5359, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.6688607594936709, | |
| "grad_norm": 0.36321133560412144, | |
| "learning_rate": 3.3642042597272844e-05, | |
| "loss": 0.5564, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.6729113924050631, | |
| "grad_norm": 0.3459380973026731, | |
| "learning_rate": 3.360048254360144e-05, | |
| "loss": 0.5409, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.6769620253164557, | |
| "grad_norm": 0.3814388950086124, | |
| "learning_rate": 3.355881295351336e-05, | |
| "loss": 0.5403, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.6810126582278482, | |
| "grad_norm": 0.33422596461029963, | |
| "learning_rate": 3.351703416260975e-05, | |
| "loss": 0.5425, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.6850632911392405, | |
| "grad_norm": 0.4005529329042578, | |
| "learning_rate": 3.347514650737126e-05, | |
| "loss": 0.5483, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.6891139240506328, | |
| "grad_norm": 0.4689663913144769, | |
| "learning_rate": 3.3433150325155295e-05, | |
| "loss": 0.5394, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.6931645569620253, | |
| "grad_norm": 0.4129597775171812, | |
| "learning_rate": 3.339104595419334e-05, | |
| "loss": 0.5474, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.6972151898734178, | |
| "grad_norm": 0.359522282612482, | |
| "learning_rate": 3.3348833733588204e-05, | |
| "loss": 0.5394, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.70126582278481, | |
| "grad_norm": 0.3911319108521518, | |
| "learning_rate": 3.3306514003311305e-05, | |
| "loss": 0.5374, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.7053164556962024, | |
| "grad_norm": 0.39425084594829646, | |
| "learning_rate": 3.326408710419996e-05, | |
| "loss": 0.5374, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.709367088607595, | |
| "grad_norm": 0.30569033685733393, | |
| "learning_rate": 3.322155337795454e-05, | |
| "loss": 0.532, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.7134177215189874, | |
| "grad_norm": 0.4114689788440459, | |
| "learning_rate": 3.317891316713587e-05, | |
| "loss": 0.546, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.7174683544303797, | |
| "grad_norm": 0.33406229895892525, | |
| "learning_rate": 3.313616681516231e-05, | |
| "loss": 0.5415, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.721518987341772, | |
| "grad_norm": 0.41967888871029113, | |
| "learning_rate": 3.309331466630713e-05, | |
| "loss": 0.541, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.7255696202531645, | |
| "grad_norm": 0.484167006840953, | |
| "learning_rate": 3.305035706569563e-05, | |
| "loss": 0.5305, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.729620253164557, | |
| "grad_norm": 0.3139086327219068, | |
| "learning_rate": 3.3007294359302433e-05, | |
| "loss": 0.5394, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.7336708860759493, | |
| "grad_norm": 0.46066214072594497, | |
| "learning_rate": 3.296412689394864e-05, | |
| "loss": 0.5546, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.7377215189873416, | |
| "grad_norm": 0.4185469507503111, | |
| "learning_rate": 3.292085501729909e-05, | |
| "loss": 0.5345, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.7417721518987341, | |
| "grad_norm": 0.41935690556386274, | |
| "learning_rate": 3.2877479077859534e-05, | |
| "loss": 0.5351, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.7458227848101266, | |
| "grad_norm": 0.34296794400016556, | |
| "learning_rate": 3.283399942497381e-05, | |
| "loss": 0.5325, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.749873417721519, | |
| "grad_norm": 0.4811290118980767, | |
| "learning_rate": 3.279041640882108e-05, | |
| "loss": 0.5492, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.7539240506329112, | |
| "grad_norm": 0.43846278587864007, | |
| "learning_rate": 3.2746730380412964e-05, | |
| "loss": 0.5281, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.7579746835443038, | |
| "grad_norm": 0.3796763443512012, | |
| "learning_rate": 3.2702941691590726e-05, | |
| "loss": 0.5361, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.7620253164556963, | |
| "grad_norm": 0.385344437134616, | |
| "learning_rate": 3.265905069502244e-05, | |
| "loss": 0.5342, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.7660759493670886, | |
| "grad_norm": 0.3254915092609221, | |
| "learning_rate": 3.261505774420016e-05, | |
| "loss": 0.5446, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.7701265822784809, | |
| "grad_norm": 0.4029359908992895, | |
| "learning_rate": 3.257096319343707e-05, | |
| "loss": 0.5343, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.7741772151898734, | |
| "grad_norm": 0.38983654386834293, | |
| "learning_rate": 3.2526767397864614e-05, | |
| "loss": 0.53, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.7782278481012659, | |
| "grad_norm": 0.4021689513277025, | |
| "learning_rate": 3.248247071342966e-05, | |
| "loss": 0.5444, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.7822784810126582, | |
| "grad_norm": 0.3607143499620443, | |
| "learning_rate": 3.243807349689161e-05, | |
| "loss": 0.5336, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.7863291139240505, | |
| "grad_norm": 0.38248097317152213, | |
| "learning_rate": 3.2393576105819544e-05, | |
| "loss": 0.541, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.790379746835443, | |
| "grad_norm": 0.4645642163819531, | |
| "learning_rate": 3.2348978898589333e-05, | |
| "loss": 0.544, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.7944303797468355, | |
| "grad_norm": 0.36762229321673434, | |
| "learning_rate": 3.230428223438075e-05, | |
| "loss": 0.536, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.7984810126582278, | |
| "grad_norm": 0.5031829811007891, | |
| "learning_rate": 3.225948647317459e-05, | |
| "loss": 0.547, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.80253164556962, | |
| "grad_norm": 0.5046474170023305, | |
| "learning_rate": 3.2214591975749745e-05, | |
| "loss": 0.5303, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.8065822784810126, | |
| "grad_norm": 0.36754667338138813, | |
| "learning_rate": 3.216959910368034e-05, | |
| "loss": 0.5421, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.8106329113924051, | |
| "grad_norm": 0.5850658243554608, | |
| "learning_rate": 3.212450821933277e-05, | |
| "loss": 0.5495, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.8146835443037974, | |
| "grad_norm": 0.4182716792735133, | |
| "learning_rate": 3.207931968586281e-05, | |
| "loss": 0.5494, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.81873417721519, | |
| "grad_norm": 0.45043513670861124, | |
| "learning_rate": 3.203403386721272e-05, | |
| "loss": 0.5411, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.8227848101265822, | |
| "grad_norm": 0.4038368882468168, | |
| "learning_rate": 3.1988651128108245e-05, | |
| "loss": 0.5372, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.8268354430379747, | |
| "grad_norm": 0.4298717566855572, | |
| "learning_rate": 3.194317183405573e-05, | |
| "loss": 0.5315, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.8308860759493673, | |
| "grad_norm": 0.4726151353762397, | |
| "learning_rate": 3.189759635133914e-05, | |
| "loss": 0.553, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.8349367088607595, | |
| "grad_norm": 0.39174820163890167, | |
| "learning_rate": 3.185192504701718e-05, | |
| "loss": 0.5357, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.8389873417721518, | |
| "grad_norm": 0.40237597754217974, | |
| "learning_rate": 3.1806158288920234e-05, | |
| "loss": 0.5423, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.8430379746835444, | |
| "grad_norm": 0.4747406954262624, | |
| "learning_rate": 3.1760296445647477e-05, | |
| "loss": 0.547, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.8470886075949369, | |
| "grad_norm": 0.3541130128504745, | |
| "learning_rate": 3.1714339886563896e-05, | |
| "loss": 0.5342, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.8511392405063292, | |
| "grad_norm": 0.5084942759911607, | |
| "learning_rate": 3.166828898179731e-05, | |
| "loss": 0.5351, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.8551898734177215, | |
| "grad_norm": 0.4189932373056671, | |
| "learning_rate": 3.162214410223536e-05, | |
| "loss": 0.5415, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.859240506329114, | |
| "grad_norm": 0.40641601601318933, | |
| "learning_rate": 3.157590561952257e-05, | |
| "loss": 0.5298, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.8632911392405065, | |
| "grad_norm": 0.39071157968080206, | |
| "learning_rate": 3.152957390605732e-05, | |
| "loss": 0.55, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.8673417721518988, | |
| "grad_norm": 0.407840622797626, | |
| "learning_rate": 3.148314933498886e-05, | |
| "loss": 0.5363, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.871392405063291, | |
| "grad_norm": 0.38872539171390313, | |
| "learning_rate": 3.143663228021431e-05, | |
| "loss": 0.5215, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.8754430379746836, | |
| "grad_norm": 0.4294321426809109, | |
| "learning_rate": 3.1390023116375624e-05, | |
| "loss": 0.545, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.879493670886076, | |
| "grad_norm": 0.4308652310896162, | |
| "learning_rate": 3.134332221885661e-05, | |
| "loss": 0.5375, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.8835443037974684, | |
| "grad_norm": 0.3387274210920273, | |
| "learning_rate": 3.129652996377987e-05, | |
| "loss": 0.5283, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.8875949367088607, | |
| "grad_norm": 0.40569876035698665, | |
| "learning_rate": 3.12496467280038e-05, | |
| "loss": 0.5432, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.8916455696202532, | |
| "grad_norm": 0.35560049369197233, | |
| "learning_rate": 3.120267288911952e-05, | |
| "loss": 0.5279, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.8956962025316457, | |
| "grad_norm": 0.4191037550397765, | |
| "learning_rate": 3.11556088254479e-05, | |
| "loss": 0.5321, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.899746835443038, | |
| "grad_norm": 0.41871093330355297, | |
| "learning_rate": 3.11084549160364e-05, | |
| "loss": 0.5459, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.9037974683544303, | |
| "grad_norm": 0.4319516986373976, | |
| "learning_rate": 3.106121154065615e-05, | |
| "loss": 0.5342, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.9078481012658228, | |
| "grad_norm": 0.3370333795937814, | |
| "learning_rate": 3.1013879079798805e-05, | |
| "loss": 0.5403, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.9118987341772153, | |
| "grad_norm": 0.3807589850618723, | |
| "learning_rate": 3.096645791467348e-05, | |
| "loss": 0.535, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.9159493670886076, | |
| "grad_norm": 0.40311505539743026, | |
| "learning_rate": 3.091894842720373e-05, | |
| "loss": 0.5295, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.3922674265774569, | |
| "learning_rate": 3.0871351000024425e-05, | |
| "loss": 0.5396, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.9240506329113924, | |
| "grad_norm": 0.3738344532460926, | |
| "learning_rate": 3.0823666016478716e-05, | |
| "loss": 0.5285, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.928101265822785, | |
| "grad_norm": 0.44853215225367477, | |
| "learning_rate": 3.0775893860614896e-05, | |
| "loss": 0.5465, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.9321518987341773, | |
| "grad_norm": 0.43311216786335766, | |
| "learning_rate": 3.0728034917183336e-05, | |
| "loss": 0.5457, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.9362025316455695, | |
| "grad_norm": 0.34032032637744697, | |
| "learning_rate": 3.06800895716334e-05, | |
| "loss": 0.5449, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.940253164556962, | |
| "grad_norm": 0.3931144547446437, | |
| "learning_rate": 3.063205821011029e-05, | |
| "loss": 0.5494, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.9443037974683546, | |
| "grad_norm": 0.4070740566429497, | |
| "learning_rate": 3.0583941219452016e-05, | |
| "loss": 0.5342, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.9483544303797469, | |
| "grad_norm": 0.33179746275159194, | |
| "learning_rate": 3.053573898718618e-05, | |
| "loss": 0.5416, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.9524050632911392, | |
| "grad_norm": 0.34338178798351077, | |
| "learning_rate": 3.0487451901526956e-05, | |
| "loss": 0.5209, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.9564556962025317, | |
| "grad_norm": 0.3647239671816951, | |
| "learning_rate": 3.0439080351371875e-05, | |
| "loss": 0.5394, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.9605063291139242, | |
| "grad_norm": 0.31168158356926057, | |
| "learning_rate": 3.0390624726298764e-05, | |
| "loss": 0.5329, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.9645569620253165, | |
| "grad_norm": 0.3648233022038886, | |
| "learning_rate": 3.034208541656255e-05, | |
| "loss": 0.5246, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.9686075949367088, | |
| "grad_norm": 0.38633942162924395, | |
| "learning_rate": 3.029346281309218e-05, | |
| "loss": 0.5403, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.9726582278481013, | |
| "grad_norm": 0.2922907360326553, | |
| "learning_rate": 3.0244757307487415e-05, | |
| "loss": 0.5397, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.9767088607594938, | |
| "grad_norm": 0.4006882666755273, | |
| "learning_rate": 3.019596929201569e-05, | |
| "loss": 0.5247, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.980759493670886, | |
| "grad_norm": 0.33899849711146635, | |
| "learning_rate": 3.0147099159608985e-05, | |
| "loss": 0.5345, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.9848101265822784, | |
| "grad_norm": 0.37265611273226523, | |
| "learning_rate": 3.0098147303860616e-05, | |
| "loss": 0.5345, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.988860759493671, | |
| "grad_norm": 0.38542510250047196, | |
| "learning_rate": 3.0049114119022117e-05, | |
| "loss": 0.5344, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.9929113924050634, | |
| "grad_norm": 0.3201548157337632, | |
| "learning_rate": 3.0000000000000004e-05, | |
| "loss": 0.536, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.9969620253164557, | |
| "grad_norm": 0.4263237846259171, | |
| "learning_rate": 2.995080534235264e-05, | |
| "loss": 0.5381, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.001012658227848, | |
| "grad_norm": 0.36218251181469985, | |
| "learning_rate": 2.9901530542287044e-05, | |
| "loss": 0.5266, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.0050632911392405, | |
| "grad_norm": 0.43475503289619366, | |
| "learning_rate": 2.9852175996655676e-05, | |
| "loss": 0.4831, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.009113924050633, | |
| "grad_norm": 0.5522471080433271, | |
| "learning_rate": 2.980274210295326e-05, | |
| "loss": 0.479, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.013164556962025, | |
| "grad_norm": 0.3763731344340183, | |
| "learning_rate": 2.9753229259313578e-05, | |
| "loss": 0.4681, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.0172151898734176, | |
| "grad_norm": 0.34598711850824376, | |
| "learning_rate": 2.9703637864506274e-05, | |
| "loss": 0.4842, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.02126582278481, | |
| "grad_norm": 0.3782078547535528, | |
| "learning_rate": 2.965396831793362e-05, | |
| "loss": 0.4757, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.0253164556962027, | |
| "grad_norm": 0.37458123967019824, | |
| "learning_rate": 2.9604221019627316e-05, | |
| "loss": 0.4819, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.0293670886075947, | |
| "grad_norm": 0.33955315351754267, | |
| "learning_rate": 2.955439637024526e-05, | |
| "loss": 0.4839, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.0334177215189873, | |
| "grad_norm": 0.40896110380817935, | |
| "learning_rate": 2.9504494771068334e-05, | |
| "loss": 0.4759, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.0374683544303798, | |
| "grad_norm": 0.3605416805918467, | |
| "learning_rate": 2.9454516623997156e-05, | |
| "loss": 0.4939, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.0415189873417723, | |
| "grad_norm": 0.4221400567759612, | |
| "learning_rate": 2.9404462331548847e-05, | |
| "loss": 0.4825, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.0455696202531644, | |
| "grad_norm": 0.4190616759508317, | |
| "learning_rate": 2.93543322968538e-05, | |
| "loss": 0.4712, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.049620253164557, | |
| "grad_norm": 0.3224446286046344, | |
| "learning_rate": 2.9304126923652428e-05, | |
| "loss": 0.4834, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.0536708860759494, | |
| "grad_norm": 0.39847285228636997, | |
| "learning_rate": 2.9253846616291896e-05, | |
| "loss": 0.4668, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.057721518987342, | |
| "grad_norm": 0.3564689682421963, | |
| "learning_rate": 2.9203491779722896e-05, | |
| "loss": 0.481, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.061772151898734, | |
| "grad_norm": 0.3429867251651154, | |
| "learning_rate": 2.9153062819496357e-05, | |
| "loss": 0.4737, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.0658227848101265, | |
| "grad_norm": 0.38063658223910224, | |
| "learning_rate": 2.9102560141760178e-05, | |
| "loss": 0.4709, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.069873417721519, | |
| "grad_norm": 0.37830501811677425, | |
| "learning_rate": 2.9051984153256004e-05, | |
| "loss": 0.4726, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.0739240506329115, | |
| "grad_norm": 0.38477889289713846, | |
| "learning_rate": 2.900133526131588e-05, | |
| "loss": 0.4828, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.0779746835443036, | |
| "grad_norm": 0.4445786120257467, | |
| "learning_rate": 2.8950613873859025e-05, | |
| "loss": 0.468, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.082025316455696, | |
| "grad_norm": 0.41793146648504415, | |
| "learning_rate": 2.8899820399388515e-05, | |
| "loss": 0.4721, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.0860759493670886, | |
| "grad_norm": 0.3860261304768386, | |
| "learning_rate": 2.8848955246988012e-05, | |
| "loss": 0.4697, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.090126582278481, | |
| "grad_norm": 0.3720994154655176, | |
| "learning_rate": 2.879801882631847e-05, | |
| "loss": 0.4825, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.094177215189873, | |
| "grad_norm": 0.35385203146409083, | |
| "learning_rate": 2.8747011547614808e-05, | |
| "loss": 0.4633, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.0982278481012657, | |
| "grad_norm": 0.3603771922977517, | |
| "learning_rate": 2.8695933821682635e-05, | |
| "loss": 0.4861, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.1022784810126582, | |
| "grad_norm": 0.3778891319887707, | |
| "learning_rate": 2.864478605989494e-05, | |
| "loss": 0.4681, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.1063291139240508, | |
| "grad_norm": 0.349902238703878, | |
| "learning_rate": 2.8593568674188765e-05, | |
| "loss": 0.4929, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.110379746835443, | |
| "grad_norm": 0.3746456445697909, | |
| "learning_rate": 2.8542282077061892e-05, | |
| "loss": 0.4749, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.1144303797468353, | |
| "grad_norm": 0.3639611067974695, | |
| "learning_rate": 2.8490926681569523e-05, | |
| "loss": 0.4653, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.118481012658228, | |
| "grad_norm": 0.3916102310017982, | |
| "learning_rate": 2.8439502901320956e-05, | |
| "loss": 0.4885, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.1225316455696204, | |
| "grad_norm": 0.4047614105274411, | |
| "learning_rate": 2.8388011150476237e-05, | |
| "loss": 0.4793, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.1265822784810124, | |
| "grad_norm": 0.38664428878208346, | |
| "learning_rate": 2.8336451843742866e-05, | |
| "loss": 0.4726, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.130632911392405, | |
| "grad_norm": 0.3976255417102989, | |
| "learning_rate": 2.8284825396372387e-05, | |
| "loss": 0.4809, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.1346835443037975, | |
| "grad_norm": 0.4235761412797531, | |
| "learning_rate": 2.8233132224157132e-05, | |
| "loss": 0.4646, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.13873417721519, | |
| "grad_norm": 0.4003990097045247, | |
| "learning_rate": 2.8181372743426805e-05, | |
| "loss": 0.4603, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.1427848101265825, | |
| "grad_norm": 0.35581775666365445, | |
| "learning_rate": 2.8129547371045128e-05, | |
| "loss": 0.4776, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.1468354430379746, | |
| "grad_norm": 0.41567461108049586, | |
| "learning_rate": 2.8077656524406534e-05, | |
| "loss": 0.4775, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.150886075949367, | |
| "grad_norm": 0.35808509338973943, | |
| "learning_rate": 2.802570062143278e-05, | |
| "loss": 0.4668, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.1549367088607596, | |
| "grad_norm": 0.3818576059096302, | |
| "learning_rate": 2.7973680080569555e-05, | |
| "loss": 0.4768, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.1589873417721517, | |
| "grad_norm": 0.359203069366619, | |
| "learning_rate": 2.792159532078314e-05, | |
| "loss": 0.4732, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.163037974683544, | |
| "grad_norm": 0.3927046311769122, | |
| "learning_rate": 2.7869446761557033e-05, | |
| "loss": 0.47, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.1670886075949367, | |
| "grad_norm": 0.36032673902912415, | |
| "learning_rate": 2.781723482288857e-05, | |
| "loss": 0.4912, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.1711392405063292, | |
| "grad_norm": 0.38689889007703165, | |
| "learning_rate": 2.7764959925285517e-05, | |
| "loss": 0.4814, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.1751898734177217, | |
| "grad_norm": 0.3499205347793075, | |
| "learning_rate": 2.771262248976272e-05, | |
| "loss": 0.4775, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.179240506329114, | |
| "grad_norm": 0.3660668046449536, | |
| "learning_rate": 2.7660222937838677e-05, | |
| "loss": 0.4737, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.1832911392405063, | |
| "grad_norm": 0.38354677422363054, | |
| "learning_rate": 2.7607761691532186e-05, | |
| "loss": 0.4679, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.187341772151899, | |
| "grad_norm": 0.4363583648431936, | |
| "learning_rate": 2.7555239173358916e-05, | |
| "loss": 0.4782, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.191392405063291, | |
| "grad_norm": 0.3758152190318853, | |
| "learning_rate": 2.7502655806328e-05, | |
| "loss": 0.4839, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.1954430379746834, | |
| "grad_norm": 0.38797637545358965, | |
| "learning_rate": 2.7450012013938648e-05, | |
| "loss": 0.4788, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.199493670886076, | |
| "grad_norm": 0.43575466484649805, | |
| "learning_rate": 2.739730822017673e-05, | |
| "loss": 0.4692, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.2035443037974685, | |
| "grad_norm": 0.3396448750393881, | |
| "learning_rate": 2.7344544849511355e-05, | |
| "loss": 0.4845, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.207594936708861, | |
| "grad_norm": 0.4449590039824782, | |
| "learning_rate": 2.7291722326891456e-05, | |
| "loss": 0.4657, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.211645569620253, | |
| "grad_norm": 0.3763570885174587, | |
| "learning_rate": 2.723884107774236e-05, | |
| "loss": 0.4808, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.2156962025316456, | |
| "grad_norm": 0.33682135330786095, | |
| "learning_rate": 2.718590152796239e-05, | |
| "loss": 0.4662, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.219746835443038, | |
| "grad_norm": 0.3765215512411809, | |
| "learning_rate": 2.71329041039194e-05, | |
| "loss": 0.4789, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.22379746835443, | |
| "grad_norm": 0.33605421933389956, | |
| "learning_rate": 2.7079849232447357e-05, | |
| "loss": 0.4759, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.2278481012658227, | |
| "grad_norm": 0.3030403177119961, | |
| "learning_rate": 2.7026737340842895e-05, | |
| "loss": 0.4918, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.231898734177215, | |
| "grad_norm": 0.4002023860778905, | |
| "learning_rate": 2.697356885686189e-05, | |
| "loss": 0.4937, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.2359493670886077, | |
| "grad_norm": 0.2915139390600507, | |
| "learning_rate": 2.6920344208716014e-05, | |
| "loss": 0.4761, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.3326407417483593, | |
| "learning_rate": 2.6867063825069252e-05, | |
| "loss": 0.4749, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.2440506329113923, | |
| "grad_norm": 0.33196624723074347, | |
| "learning_rate": 2.6813728135034494e-05, | |
| "loss": 0.4769, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.248101265822785, | |
| "grad_norm": 0.3218304755722093, | |
| "learning_rate": 2.6760337568170056e-05, | |
| "loss": 0.4821, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.2521518987341773, | |
| "grad_norm": 0.34508694911009485, | |
| "learning_rate": 2.6706892554476226e-05, | |
| "loss": 0.4657, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.2562025316455694, | |
| "grad_norm": 0.33474022696005784, | |
| "learning_rate": 2.6653393524391795e-05, | |
| "loss": 0.4832, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.260253164556962, | |
| "grad_norm": 0.34516062097121053, | |
| "learning_rate": 2.6599840908790592e-05, | |
| "loss": 0.4684, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.2643037974683544, | |
| "grad_norm": 0.4150549466606544, | |
| "learning_rate": 2.6546235138978028e-05, | |
| "loss": 0.4689, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.268354430379747, | |
| "grad_norm": 0.28113382538262743, | |
| "learning_rate": 2.6492576646687597e-05, | |
| "loss": 0.4769, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.2724050632911394, | |
| "grad_norm": 0.34650250414383393, | |
| "learning_rate": 2.6438865864077425e-05, | |
| "loss": 0.4731, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.2764556962025315, | |
| "grad_norm": 0.3094632400348333, | |
| "learning_rate": 2.6385103223726766e-05, | |
| "loss": 0.4769, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.280506329113924, | |
| "grad_norm": 0.31966921165855666, | |
| "learning_rate": 2.6331289158632537e-05, | |
| "loss": 0.4783, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.2845569620253166, | |
| "grad_norm": 0.335100320633454, | |
| "learning_rate": 2.6277424102205817e-05, | |
| "loss": 0.4688, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.2886075949367086, | |
| "grad_norm": 0.3272543767643052, | |
| "learning_rate": 2.6223508488268374e-05, | |
| "loss": 0.4825, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.292658227848101, | |
| "grad_norm": 0.3222351905734499, | |
| "learning_rate": 2.6169542751049148e-05, | |
| "loss": 0.4743, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.2967088607594937, | |
| "grad_norm": 0.29133982571615624, | |
| "learning_rate": 2.6115527325180754e-05, | |
| "loss": 0.4734, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.300759493670886, | |
| "grad_norm": 0.3526421961594564, | |
| "learning_rate": 2.606146264569603e-05, | |
| "loss": 0.4789, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.3048101265822787, | |
| "grad_norm": 0.2855632943793194, | |
| "learning_rate": 2.6007349148024447e-05, | |
| "loss": 0.4742, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.3088607594936708, | |
| "grad_norm": 0.2758318762766398, | |
| "learning_rate": 2.5953187267988694e-05, | |
| "loss": 0.4769, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.3129113924050633, | |
| "grad_norm": 0.28536802347088636, | |
| "learning_rate": 2.5898977441801097e-05, | |
| "loss": 0.4828, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.316962025316456, | |
| "grad_norm": 0.2912184815269533, | |
| "learning_rate": 2.584472010606015e-05, | |
| "loss": 0.4832, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.321012658227848, | |
| "grad_norm": 0.2926513723531926, | |
| "learning_rate": 2.5790415697746976e-05, | |
| "loss": 0.4846, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.3250632911392404, | |
| "grad_norm": 0.35350058469195755, | |
| "learning_rate": 2.5736064654221808e-05, | |
| "loss": 0.4851, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.329113924050633, | |
| "grad_norm": 0.3230309374903499, | |
| "learning_rate": 2.568166741322048e-05, | |
| "loss": 0.484, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.3331645569620254, | |
| "grad_norm": 0.3314096805628127, | |
| "learning_rate": 2.56272244128509e-05, | |
| "loss": 0.4855, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.337215189873418, | |
| "grad_norm": 0.3490015273323093, | |
| "learning_rate": 2.55727360915895e-05, | |
| "loss": 0.4746, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.34126582278481, | |
| "grad_norm": 0.32486101490565855, | |
| "learning_rate": 2.5518202888277734e-05, | |
| "loss": 0.4667, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.3453164556962025, | |
| "grad_norm": 0.3274095087103921, | |
| "learning_rate": 2.5463625242118523e-05, | |
| "loss": 0.4765, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.349367088607595, | |
| "grad_norm": 0.3422790574046964, | |
| "learning_rate": 2.5409003592672723e-05, | |
| "loss": 0.4694, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.353417721518987, | |
| "grad_norm": 0.29283828797123745, | |
| "learning_rate": 2.535433837985559e-05, | |
| "loss": 0.479, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.3574683544303796, | |
| "grad_norm": 0.30676104799342374, | |
| "learning_rate": 2.529963004393324e-05, | |
| "loss": 0.4839, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.361518987341772, | |
| "grad_norm": 0.2862687560334886, | |
| "learning_rate": 2.524487902551908e-05, | |
| "loss": 0.4761, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.3655696202531646, | |
| "grad_norm": 0.2918802675586107, | |
| "learning_rate": 2.519008576557029e-05, | |
| "loss": 0.4769, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.369620253164557, | |
| "grad_norm": 0.27279853932837567, | |
| "learning_rate": 2.5135250705384254e-05, | |
| "loss": 0.4725, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.3736708860759492, | |
| "grad_norm": 0.31605762881008237, | |
| "learning_rate": 2.5080374286595007e-05, | |
| "loss": 0.4829, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.3777215189873417, | |
| "grad_norm": 0.33091440623652313, | |
| "learning_rate": 2.5025456951169677e-05, | |
| "loss": 0.4817, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.3817721518987343, | |
| "grad_norm": 0.3408396691439725, | |
| "learning_rate": 2.4970499141404942e-05, | |
| "loss": 0.487, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.3858227848101268, | |
| "grad_norm": 0.3093916353671197, | |
| "learning_rate": 2.491550129992345e-05, | |
| "loss": 0.4651, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.389873417721519, | |
| "grad_norm": 0.37549655522254166, | |
| "learning_rate": 2.486046386967024e-05, | |
| "loss": 0.491, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.3939240506329114, | |
| "grad_norm": 0.33252863629927754, | |
| "learning_rate": 2.4805387293909214e-05, | |
| "loss": 0.4844, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.397974683544304, | |
| "grad_norm": 0.34696471159228814, | |
| "learning_rate": 2.4750272016219552e-05, | |
| "loss": 0.4825, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.4020253164556964, | |
| "grad_norm": 0.27874228653082794, | |
| "learning_rate": 2.4695118480492114e-05, | |
| "loss": 0.4767, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.4060759493670885, | |
| "grad_norm": 0.3526802932142688, | |
| "learning_rate": 2.4639927130925898e-05, | |
| "loss": 0.4636, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.410126582278481, | |
| "grad_norm": 0.3193929033201143, | |
| "learning_rate": 2.458469841202444e-05, | |
| "loss": 0.461, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.4141772151898735, | |
| "grad_norm": 0.2822612975314097, | |
| "learning_rate": 2.452943276859226e-05, | |
| "loss": 0.471, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.418227848101266, | |
| "grad_norm": 0.3263694612527554, | |
| "learning_rate": 2.447413064573125e-05, | |
| "loss": 0.471, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.422278481012658, | |
| "grad_norm": 0.2691493980927656, | |
| "learning_rate": 2.4418792488837095e-05, | |
| "loss": 0.4793, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.4263291139240506, | |
| "grad_norm": 0.3592824488420059, | |
| "learning_rate": 2.4363418743595713e-05, | |
| "loss": 0.4814, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.430379746835443, | |
| "grad_norm": 0.3272004210136757, | |
| "learning_rate": 2.430800985597963e-05, | |
| "loss": 0.4826, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.4344303797468356, | |
| "grad_norm": 0.3475348254171806, | |
| "learning_rate": 2.4252566272244415e-05, | |
| "loss": 0.475, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.4384810126582277, | |
| "grad_norm": 0.34155619397368925, | |
| "learning_rate": 2.4197088438925063e-05, | |
| "loss": 0.4719, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.44253164556962, | |
| "grad_norm": 0.36272403804820563, | |
| "learning_rate": 2.4141576802832417e-05, | |
| "loss": 0.4758, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.4465822784810127, | |
| "grad_norm": 0.35029028480560104, | |
| "learning_rate": 2.408603181104957e-05, | |
| "loss": 0.4723, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.4506329113924052, | |
| "grad_norm": 0.30681816427051056, | |
| "learning_rate": 2.4030453910928245e-05, | |
| "loss": 0.4711, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.4546835443037973, | |
| "grad_norm": 0.3116692192646477, | |
| "learning_rate": 2.397484355008521e-05, | |
| "loss": 0.4684, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.45873417721519, | |
| "grad_norm": 0.2902988121144744, | |
| "learning_rate": 2.3919201176398662e-05, | |
| "loss": 0.4689, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.4627848101265823, | |
| "grad_norm": 0.32033334456934104, | |
| "learning_rate": 2.3863527238004633e-05, | |
| "loss": 0.4744, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.466835443037975, | |
| "grad_norm": 0.30502349143264085, | |
| "learning_rate": 2.380782218329337e-05, | |
| "loss": 0.4736, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.470886075949367, | |
| "grad_norm": 0.30995443251241084, | |
| "learning_rate": 2.3752086460905725e-05, | |
| "loss": 0.4771, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.4749367088607594, | |
| "grad_norm": 0.3248501480122993, | |
| "learning_rate": 2.3696320519729544e-05, | |
| "loss": 0.4771, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.478987341772152, | |
| "grad_norm": 0.25236006669814004, | |
| "learning_rate": 2.3640524808896045e-05, | |
| "loss": 0.4791, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.4830379746835445, | |
| "grad_norm": 0.3114865461571946, | |
| "learning_rate": 2.3584699777776222e-05, | |
| "loss": 0.4791, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.4870886075949366, | |
| "grad_norm": 0.31236338864705204, | |
| "learning_rate": 2.3528845875977195e-05, | |
| "loss": 0.4714, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.491139240506329, | |
| "grad_norm": 0.28424314151270896, | |
| "learning_rate": 2.3472963553338614e-05, | |
| "loss": 0.474, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.4951898734177216, | |
| "grad_norm": 0.3336640402538505, | |
| "learning_rate": 2.341705325992901e-05, | |
| "loss": 0.4725, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.499240506329114, | |
| "grad_norm": 0.2656189488110121, | |
| "learning_rate": 2.336111544604222e-05, | |
| "loss": 0.4763, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.503291139240506, | |
| "grad_norm": 0.36847590302993144, | |
| "learning_rate": 2.33051505621937e-05, | |
| "loss": 0.475, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.5073417721518987, | |
| "grad_norm": 0.35753646224280883, | |
| "learning_rate": 2.324915905911693e-05, | |
| "loss": 0.4733, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.511392405063291, | |
| "grad_norm": 0.3494215093098695, | |
| "learning_rate": 2.319314138775977e-05, | |
| "loss": 0.4859, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.5154430379746833, | |
| "grad_norm": 0.38249763770266965, | |
| "learning_rate": 2.3137097999280856e-05, | |
| "loss": 0.4807, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.519493670886076, | |
| "grad_norm": 0.3186718222404792, | |
| "learning_rate": 2.308102934504593e-05, | |
| "loss": 0.4574, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.5235443037974683, | |
| "grad_norm": 0.35887336926079594, | |
| "learning_rate": 2.3024935876624222e-05, | |
| "loss": 0.4753, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.527594936708861, | |
| "grad_norm": 0.34259229369636574, | |
| "learning_rate": 2.2968818045784813e-05, | |
| "loss": 0.4812, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.5316455696202533, | |
| "grad_norm": 0.3979949591322303, | |
| "learning_rate": 2.2912676304493006e-05, | |
| "loss": 0.4801, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.5356962025316454, | |
| "grad_norm": 0.2892341488647758, | |
| "learning_rate": 2.2856511104906668e-05, | |
| "loss": 0.4802, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.539746835443038, | |
| "grad_norm": 0.33317174265454685, | |
| "learning_rate": 2.2800322899372586e-05, | |
| "loss": 0.4872, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.5437974683544304, | |
| "grad_norm": 0.3132697338781742, | |
| "learning_rate": 2.2744112140422844e-05, | |
| "loss": 0.4889, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.547848101265823, | |
| "grad_norm": 0.36276410992858144, | |
| "learning_rate": 2.2687879280771177e-05, | |
| "loss": 0.4817, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.5518987341772155, | |
| "grad_norm": 0.3515364607483676, | |
| "learning_rate": 2.26316247733093e-05, | |
| "loss": 0.4821, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.5559493670886075, | |
| "grad_norm": 0.3604397519094138, | |
| "learning_rate": 2.257534907110328e-05, | |
| "loss": 0.4867, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.30478968859405936, | |
| "learning_rate": 2.2519052627389882e-05, | |
| "loss": 0.4845, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.5640506329113926, | |
| "grad_norm": 0.3292016380262924, | |
| "learning_rate": 2.246273589557294e-05, | |
| "loss": 0.4732, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.5681012658227846, | |
| "grad_norm": 0.3515522413548847, | |
| "learning_rate": 2.240639932921966e-05, | |
| "loss": 0.4854, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.572151898734177, | |
| "grad_norm": 0.31006476976564123, | |
| "learning_rate": 2.2350043382056995e-05, | |
| "loss": 0.4853, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.5762025316455697, | |
| "grad_norm": 0.3556850594205431, | |
| "learning_rate": 2.2293668507968015e-05, | |
| "loss": 0.4758, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.580253164556962, | |
| "grad_norm": 0.2797881880751504, | |
| "learning_rate": 2.2237275160988186e-05, | |
| "loss": 0.473, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.5843037974683547, | |
| "grad_norm": 0.3434045556527874, | |
| "learning_rate": 2.2180863795301787e-05, | |
| "loss": 0.4716, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.5883544303797468, | |
| "grad_norm": 0.31737375629911485, | |
| "learning_rate": 2.212443486523819e-05, | |
| "loss": 0.4794, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.5924050632911393, | |
| "grad_norm": 0.2900116209765602, | |
| "learning_rate": 2.2067988825268243e-05, | |
| "loss": 0.4823, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.596455696202532, | |
| "grad_norm": 0.34729503449377835, | |
| "learning_rate": 2.2011526130000596e-05, | |
| "loss": 0.4795, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.600506329113924, | |
| "grad_norm": 0.2867838883203389, | |
| "learning_rate": 2.1955047234178038e-05, | |
| "loss": 0.4767, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.6045569620253164, | |
| "grad_norm": 0.41088219198172937, | |
| "learning_rate": 2.1898552592673825e-05, | |
| "loss": 0.4809, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.608607594936709, | |
| "grad_norm": 0.28244752841301873, | |
| "learning_rate": 2.184204266048803e-05, | |
| "loss": 0.4719, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.6126582278481014, | |
| "grad_norm": 0.38570926123915483, | |
| "learning_rate": 2.1785517892743887e-05, | |
| "loss": 0.4833, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.616708860759494, | |
| "grad_norm": 0.2854265371746082, | |
| "learning_rate": 2.17289787446841e-05, | |
| "loss": 0.4841, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.620759493670886, | |
| "grad_norm": 0.3071722363054009, | |
| "learning_rate": 2.1672425671667198e-05, | |
| "loss": 0.4778, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.6248101265822785, | |
| "grad_norm": 0.28661409653577496, | |
| "learning_rate": 2.161585912916385e-05, | |
| "loss": 0.4757, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.628860759493671, | |
| "grad_norm": 0.2627374037283905, | |
| "learning_rate": 2.1559279572753214e-05, | |
| "loss": 0.4819, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.632911392405063, | |
| "grad_norm": 0.30707730798794175, | |
| "learning_rate": 2.1502687458119268e-05, | |
| "loss": 0.4826, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.6369620253164556, | |
| "grad_norm": 0.25852415290575975, | |
| "learning_rate": 2.1446083241047116e-05, | |
| "loss": 0.4789, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.641012658227848, | |
| "grad_norm": 0.2903138910648452, | |
| "learning_rate": 2.1389467377419333e-05, | |
| "loss": 0.4847, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.6450632911392407, | |
| "grad_norm": 0.2571669475389089, | |
| "learning_rate": 2.133284032321232e-05, | |
| "loss": 0.4797, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.649113924050633, | |
| "grad_norm": 0.2773078827892671, | |
| "learning_rate": 2.1276202534492566e-05, | |
| "loss": 0.4771, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.6531645569620252, | |
| "grad_norm": 0.27219386243791915, | |
| "learning_rate": 2.121955446741306e-05, | |
| "loss": 0.4807, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.6572151898734178, | |
| "grad_norm": 0.2968779714208418, | |
| "learning_rate": 2.1162896578209517e-05, | |
| "loss": 0.48, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.6612658227848103, | |
| "grad_norm": 0.3062583497382708, | |
| "learning_rate": 2.1106229323196813e-05, | |
| "loss": 0.4847, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.6653164556962023, | |
| "grad_norm": 0.2796401366125329, | |
| "learning_rate": 2.1049553158765214e-05, | |
| "loss": 0.4743, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.669367088607595, | |
| "grad_norm": 0.2883572189424844, | |
| "learning_rate": 2.0992868541376764e-05, | |
| "loss": 0.477, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.6734177215189874, | |
| "grad_norm": 0.27801694602980254, | |
| "learning_rate": 2.093617592756158e-05, | |
| "loss": 0.4798, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.67746835443038, | |
| "grad_norm": 0.3193627113211239, | |
| "learning_rate": 2.0879475773914167e-05, | |
| "loss": 0.4761, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.6815189873417724, | |
| "grad_norm": 0.26885974629122417, | |
| "learning_rate": 2.082276853708978e-05, | |
| "loss": 0.4717, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.6855696202531645, | |
| "grad_norm": 0.2867671259884346, | |
| "learning_rate": 2.076605467380071e-05, | |
| "loss": 0.465, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.689620253164557, | |
| "grad_norm": 0.30369190188821454, | |
| "learning_rate": 2.0709334640812613e-05, | |
| "loss": 0.4848, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.6936708860759495, | |
| "grad_norm": 0.32604623804109556, | |
| "learning_rate": 2.0652608894940824e-05, | |
| "loss": 0.4783, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.6977215189873416, | |
| "grad_norm": 0.32735817951139423, | |
| "learning_rate": 2.0595877893046722e-05, | |
| "loss": 0.4817, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.701772151898734, | |
| "grad_norm": 0.3087098683795272, | |
| "learning_rate": 2.0539142092033985e-05, | |
| "loss": 0.4737, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.7058227848101266, | |
| "grad_norm": 0.29584430334651424, | |
| "learning_rate": 2.048240194884496e-05, | |
| "loss": 0.4825, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.709873417721519, | |
| "grad_norm": 0.250267747047268, | |
| "learning_rate": 2.042565792045695e-05, | |
| "loss": 0.4693, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.7139240506329116, | |
| "grad_norm": 0.32890306174242084, | |
| "learning_rate": 2.036891046387857e-05, | |
| "loss": 0.4898, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.7179746835443037, | |
| "grad_norm": 0.26997334534174616, | |
| "learning_rate": 2.0312160036146036e-05, | |
| "loss": 0.4745, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.7220253164556962, | |
| "grad_norm": 0.2717094895407256, | |
| "learning_rate": 2.025540709431948e-05, | |
| "loss": 0.4826, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.7260759493670887, | |
| "grad_norm": 0.2806731459090754, | |
| "learning_rate": 2.0198652095479298e-05, | |
| "loss": 0.4712, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.730126582278481, | |
| "grad_norm": 0.2635982610716554, | |
| "learning_rate": 2.014189549672245e-05, | |
| "loss": 0.4881, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.7341772151898733, | |
| "grad_norm": 0.29174167625157627, | |
| "learning_rate": 2.0085137755158776e-05, | |
| "loss": 0.4787, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.738227848101266, | |
| "grad_norm": 0.28356391401337294, | |
| "learning_rate": 2.0028379327907327e-05, | |
| "loss": 0.4836, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.7422784810126584, | |
| "grad_norm": 0.3051622905610548, | |
| "learning_rate": 1.9971620672092676e-05, | |
| "loss": 0.4773, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.746329113924051, | |
| "grad_norm": 0.3150607098143032, | |
| "learning_rate": 1.991486224484123e-05, | |
| "loss": 0.4706, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.750379746835443, | |
| "grad_norm": 0.2833550130785345, | |
| "learning_rate": 1.985810450327756e-05, | |
| "loss": 0.4784, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.7544303797468355, | |
| "grad_norm": 0.29800274057004567, | |
| "learning_rate": 1.9801347904520706e-05, | |
| "loss": 0.4865, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.758481012658228, | |
| "grad_norm": 0.3659637134691945, | |
| "learning_rate": 1.974459290568053e-05, | |
| "loss": 0.4743, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.76253164556962, | |
| "grad_norm": 0.28047729144792405, | |
| "learning_rate": 1.968783996385397e-05, | |
| "loss": 0.4855, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.7665822784810126, | |
| "grad_norm": 0.39218986424799895, | |
| "learning_rate": 1.963108953612143e-05, | |
| "loss": 0.4763, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.770632911392405, | |
| "grad_norm": 0.2723243967544914, | |
| "learning_rate": 1.9574342079543056e-05, | |
| "loss": 0.4719, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.7746835443037976, | |
| "grad_norm": 0.37145420446621163, | |
| "learning_rate": 1.9517598051155046e-05, | |
| "loss": 0.4812, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.77873417721519, | |
| "grad_norm": 0.3061463333237582, | |
| "learning_rate": 1.9460857907966025e-05, | |
| "loss": 0.4714, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.782784810126582, | |
| "grad_norm": 0.25483731288591094, | |
| "learning_rate": 1.9404122106953285e-05, | |
| "loss": 0.4687, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.7868354430379747, | |
| "grad_norm": 0.27962319481160547, | |
| "learning_rate": 1.9347391105059176e-05, | |
| "loss": 0.477, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.790886075949367, | |
| "grad_norm": 0.2851455439563356, | |
| "learning_rate": 1.92906653591874e-05, | |
| "loss": 0.4772, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.7949367088607593, | |
| "grad_norm": 0.279462694472255, | |
| "learning_rate": 1.9233945326199295e-05, | |
| "loss": 0.487, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.798987341772152, | |
| "grad_norm": 0.24248303724865086, | |
| "learning_rate": 1.917723146291022e-05, | |
| "loss": 0.4646, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.8030379746835443, | |
| "grad_norm": 0.26246364236862585, | |
| "learning_rate": 1.912052422608584e-05, | |
| "loss": 0.4873, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.807088607594937, | |
| "grad_norm": 0.2762944044046232, | |
| "learning_rate": 1.9063824072438428e-05, | |
| "loss": 0.4738, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.8111392405063294, | |
| "grad_norm": 0.2812736100561901, | |
| "learning_rate": 1.9007131458623246e-05, | |
| "loss": 0.4777, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.8151898734177214, | |
| "grad_norm": 0.26215713489178744, | |
| "learning_rate": 1.895044684123479e-05, | |
| "loss": 0.477, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.819240506329114, | |
| "grad_norm": 0.27664925570323157, | |
| "learning_rate": 1.8893770676803194e-05, | |
| "loss": 0.4708, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.8232911392405065, | |
| "grad_norm": 0.29264477980840436, | |
| "learning_rate": 1.8837103421790486e-05, | |
| "loss": 0.4825, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.8273417721518985, | |
| "grad_norm": 0.2780685787822027, | |
| "learning_rate": 1.8780445532586952e-05, | |
| "loss": 0.4891, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.831392405063291, | |
| "grad_norm": 0.28561716925670644, | |
| "learning_rate": 1.872379746550743e-05, | |
| "loss": 0.4678, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.8354430379746836, | |
| "grad_norm": 0.28336347582050636, | |
| "learning_rate": 1.866715967678769e-05, | |
| "loss": 0.4893, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.839493670886076, | |
| "grad_norm": 0.2783766954935952, | |
| "learning_rate": 1.861053262258067e-05, | |
| "loss": 0.4907, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.8435443037974686, | |
| "grad_norm": 0.2849008998529939, | |
| "learning_rate": 1.8553916758952897e-05, | |
| "loss": 0.4627, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.8475949367088607, | |
| "grad_norm": 0.2915410596288912, | |
| "learning_rate": 1.8497312541880735e-05, | |
| "loss": 0.4801, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.851645569620253, | |
| "grad_norm": 0.28530563148503196, | |
| "learning_rate": 1.8440720427246786e-05, | |
| "loss": 0.4821, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.8556962025316457, | |
| "grad_norm": 0.2818309014268536, | |
| "learning_rate": 1.8384140870836157e-05, | |
| "loss": 0.4825, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.8597468354430378, | |
| "grad_norm": 0.29390373688934235, | |
| "learning_rate": 1.8327574328332806e-05, | |
| "loss": 0.4717, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.8637974683544303, | |
| "grad_norm": 0.24495911510773882, | |
| "learning_rate": 1.8271021255315906e-05, | |
| "loss": 0.4646, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.867848101265823, | |
| "grad_norm": 0.3026569601410154, | |
| "learning_rate": 1.8214482107256117e-05, | |
| "loss": 0.4848, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.8718987341772153, | |
| "grad_norm": 0.2823724263776704, | |
| "learning_rate": 1.8157957339511968e-05, | |
| "loss": 0.4772, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.875949367088608, | |
| "grad_norm": 0.31224754823779277, | |
| "learning_rate": 1.8101447407326182e-05, | |
| "loss": 0.4866, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.3105167232519128, | |
| "learning_rate": 1.8044952765821966e-05, | |
| "loss": 0.4913, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.8840506329113924, | |
| "grad_norm": 0.29260198358429834, | |
| "learning_rate": 1.7988473869999407e-05, | |
| "loss": 0.4763, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.888101265822785, | |
| "grad_norm": 0.2864280785116009, | |
| "learning_rate": 1.7932011174731764e-05, | |
| "loss": 0.474, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.892151898734177, | |
| "grad_norm": 0.27861503618550526, | |
| "learning_rate": 1.7875565134761817e-05, | |
| "loss": 0.4811, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.8962025316455695, | |
| "grad_norm": 0.30879463826854525, | |
| "learning_rate": 1.7819136204698226e-05, | |
| "loss": 0.4635, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.900253164556962, | |
| "grad_norm": 0.27861226754598467, | |
| "learning_rate": 1.776272483901182e-05, | |
| "loss": 0.4763, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.9043037974683545, | |
| "grad_norm": 0.3160370177740227, | |
| "learning_rate": 1.7706331492031995e-05, | |
| "loss": 0.4834, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.908354430379747, | |
| "grad_norm": 0.28104722121537434, | |
| "learning_rate": 1.764995661794301e-05, | |
| "loss": 0.4857, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.912405063291139, | |
| "grad_norm": 0.29967480423503984, | |
| "learning_rate": 1.759360067078035e-05, | |
| "loss": 0.4874, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.9164556962025316, | |
| "grad_norm": 0.2763044671677636, | |
| "learning_rate": 1.7537264104427064e-05, | |
| "loss": 0.4656, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.920506329113924, | |
| "grad_norm": 0.29848930441575344, | |
| "learning_rate": 1.748094737261012e-05, | |
| "loss": 0.4754, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.9245569620253162, | |
| "grad_norm": 0.2813936957891289, | |
| "learning_rate": 1.7424650928896726e-05, | |
| "loss": 0.4702, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.9286075949367087, | |
| "grad_norm": 0.28861730672302316, | |
| "learning_rate": 1.7368375226690712e-05, | |
| "loss": 0.4707, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.9326582278481013, | |
| "grad_norm": 0.24693368164976032, | |
| "learning_rate": 1.731212071922883e-05, | |
| "loss": 0.4738, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.9367088607594938, | |
| "grad_norm": 0.2710623013366391, | |
| "learning_rate": 1.7255887859577156e-05, | |
| "loss": 0.4744, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.9407594936708863, | |
| "grad_norm": 0.27899506630638915, | |
| "learning_rate": 1.7199677100627427e-05, | |
| "loss": 0.4805, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.9448101265822784, | |
| "grad_norm": 0.24794190806912542, | |
| "learning_rate": 1.7143488895093343e-05, | |
| "loss": 0.4827, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.948860759493671, | |
| "grad_norm": 0.2738379651196646, | |
| "learning_rate": 1.7087323695506994e-05, | |
| "loss": 0.4758, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.9529113924050634, | |
| "grad_norm": 0.2574243838368711, | |
| "learning_rate": 1.7031181954215194e-05, | |
| "loss": 0.4817, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.9569620253164555, | |
| "grad_norm": 0.32093151182428625, | |
| "learning_rate": 1.6975064123375788e-05, | |
| "loss": 0.4818, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.961012658227848, | |
| "grad_norm": 0.27264824183723835, | |
| "learning_rate": 1.6918970654954084e-05, | |
| "loss": 0.4689, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.9650632911392405, | |
| "grad_norm": 0.3826800453853356, | |
| "learning_rate": 1.686290200071915e-05, | |
| "loss": 0.4682, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.969113924050633, | |
| "grad_norm": 0.25243349645292534, | |
| "learning_rate": 1.6806858612240234e-05, | |
| "loss": 0.472, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.9731645569620255, | |
| "grad_norm": 0.3221055195117261, | |
| "learning_rate": 1.6750840940883078e-05, | |
| "loss": 0.4811, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.9772151898734176, | |
| "grad_norm": 0.3115920830162225, | |
| "learning_rate": 1.6694849437806305e-05, | |
| "loss": 0.4789, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.98126582278481, | |
| "grad_norm": 0.30484074672963696, | |
| "learning_rate": 1.663888455395778e-05, | |
| "loss": 0.4764, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.9853164556962026, | |
| "grad_norm": 0.2919317308934776, | |
| "learning_rate": 1.6582946740070995e-05, | |
| "loss": 0.4866, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.9893670886075947, | |
| "grad_norm": 0.27078260294352235, | |
| "learning_rate": 1.6527036446661396e-05, | |
| "loss": 0.4717, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.993417721518987, | |
| "grad_norm": 0.2439449818749629, | |
| "learning_rate": 1.6471154124022818e-05, | |
| "loss": 0.4842, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 2.9974683544303797, | |
| "grad_norm": 0.2893038867282254, | |
| "learning_rate": 1.6415300222223788e-05, | |
| "loss": 0.4833, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.0015189873417722, | |
| "grad_norm": 0.292183359435827, | |
| "learning_rate": 1.6359475191103958e-05, | |
| "loss": 0.4558, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.0055696202531648, | |
| "grad_norm": 0.4287388818809184, | |
| "learning_rate": 1.6303679480270466e-05, | |
| "loss": 0.4017, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.009620253164557, | |
| "grad_norm": 0.6405146802296977, | |
| "learning_rate": 1.624791353909428e-05, | |
| "loss": 0.4152, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.0136708860759494, | |
| "grad_norm": 0.39000129626059893, | |
| "learning_rate": 1.619217781670663e-05, | |
| "loss": 0.4079, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.017721518987342, | |
| "grad_norm": 0.4819800653144911, | |
| "learning_rate": 1.6136472761995373e-05, | |
| "loss": 0.3991, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.0217721518987344, | |
| "grad_norm": 0.3557365651832404, | |
| "learning_rate": 1.608079882360134e-05, | |
| "loss": 0.4057, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.0258227848101265, | |
| "grad_norm": 0.3928838264136881, | |
| "learning_rate": 1.60251564499148e-05, | |
| "loss": 0.4098, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.029873417721519, | |
| "grad_norm": 0.40434179498614453, | |
| "learning_rate": 1.596954608907176e-05, | |
| "loss": 0.4084, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.0339240506329115, | |
| "grad_norm": 0.30780651453264884, | |
| "learning_rate": 1.591396818895043e-05, | |
| "loss": 0.4185, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.037974683544304, | |
| "grad_norm": 0.3931479240202925, | |
| "learning_rate": 1.585842319716759e-05, | |
| "loss": 0.4159, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.042025316455696, | |
| "grad_norm": 0.33303071423313646, | |
| "learning_rate": 1.5802911561074944e-05, | |
| "loss": 0.4226, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.0460759493670886, | |
| "grad_norm": 0.33189024750192686, | |
| "learning_rate": 1.5747433727755595e-05, | |
| "loss": 0.4166, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.050126582278481, | |
| "grad_norm": 0.35020159759839736, | |
| "learning_rate": 1.5691990144020376e-05, | |
| "loss": 0.4066, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.0541772151898736, | |
| "grad_norm": 0.29372359714540297, | |
| "learning_rate": 1.5636581256404297e-05, | |
| "loss": 0.4221, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.0582278481012657, | |
| "grad_norm": 0.357782655397944, | |
| "learning_rate": 1.558120751116291e-05, | |
| "loss": 0.4064, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.062278481012658, | |
| "grad_norm": 0.3181027274281555, | |
| "learning_rate": 1.552586935426876e-05, | |
| "loss": 0.4181, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.0663291139240507, | |
| "grad_norm": 0.325968737360748, | |
| "learning_rate": 1.547056723140774e-05, | |
| "loss": 0.418, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.0703797468354432, | |
| "grad_norm": 0.32482107280557443, | |
| "learning_rate": 1.5415301587975565e-05, | |
| "loss": 0.4176, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.0744303797468353, | |
| "grad_norm": 0.3087307699702407, | |
| "learning_rate": 1.536007286907411e-05, | |
| "loss": 0.4076, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.078481012658228, | |
| "grad_norm": 0.27648293336701857, | |
| "learning_rate": 1.5304881519507896e-05, | |
| "loss": 0.4015, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.0825316455696203, | |
| "grad_norm": 0.28823527616677114, | |
| "learning_rate": 1.5249727983780453e-05, | |
| "loss": 0.4032, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.086582278481013, | |
| "grad_norm": 0.2753793130881606, | |
| "learning_rate": 1.5194612706090786e-05, | |
| "loss": 0.4065, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.090632911392405, | |
| "grad_norm": 0.2842583349395612, | |
| "learning_rate": 1.5139536130329771e-05, | |
| "loss": 0.4183, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.0946835443037974, | |
| "grad_norm": 0.3139698593956553, | |
| "learning_rate": 1.508449870007656e-05, | |
| "loss": 0.4086, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.09873417721519, | |
| "grad_norm": 0.26890735341676075, | |
| "learning_rate": 1.5029500858595056e-05, | |
| "loss": 0.4209, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.1027848101265825, | |
| "grad_norm": 0.34493076298877834, | |
| "learning_rate": 1.4974543048830328e-05, | |
| "loss": 0.4278, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.1068354430379745, | |
| "grad_norm": 0.25067460468583025, | |
| "learning_rate": 1.4919625713405e-05, | |
| "loss": 0.4316, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.110886075949367, | |
| "grad_norm": 0.31800266759966306, | |
| "learning_rate": 1.4864749294615756e-05, | |
| "loss": 0.4249, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.1149367088607596, | |
| "grad_norm": 0.2796030532912686, | |
| "learning_rate": 1.4809914234429716e-05, | |
| "loss": 0.4316, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.118987341772152, | |
| "grad_norm": 0.28097556614399005, | |
| "learning_rate": 1.4755120974480923e-05, | |
| "loss": 0.4238, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.123037974683544, | |
| "grad_norm": 0.276711665638413, | |
| "learning_rate": 1.4700369956066771e-05, | |
| "loss": 0.4091, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.1270886075949367, | |
| "grad_norm": 0.2891319331223242, | |
| "learning_rate": 1.4645661620144413e-05, | |
| "loss": 0.4004, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.131139240506329, | |
| "grad_norm": 0.27271292664078356, | |
| "learning_rate": 1.4590996407327284e-05, | |
| "loss": 0.4085, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.1351898734177217, | |
| "grad_norm": 0.2708752039695535, | |
| "learning_rate": 1.4536374757881487e-05, | |
| "loss": 0.4163, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.1392405063291138, | |
| "grad_norm": 0.2625803779346398, | |
| "learning_rate": 1.4481797111722271e-05, | |
| "loss": 0.4046, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.1432911392405063, | |
| "grad_norm": 0.2770914398005877, | |
| "learning_rate": 1.4427263908410507e-05, | |
| "loss": 0.4153, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.147341772151899, | |
| "grad_norm": 0.24729893699837818, | |
| "learning_rate": 1.4372775587149108e-05, | |
| "loss": 0.4156, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.1513924050632913, | |
| "grad_norm": 0.26274937712690305, | |
| "learning_rate": 1.4318332586779522e-05, | |
| "loss": 0.4226, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.1554430379746834, | |
| "grad_norm": 0.23027884807607785, | |
| "learning_rate": 1.4263935345778202e-05, | |
| "loss": 0.406, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.159493670886076, | |
| "grad_norm": 0.2566954956603609, | |
| "learning_rate": 1.420958430225303e-05, | |
| "loss": 0.405, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.1635443037974684, | |
| "grad_norm": 0.23870188842733345, | |
| "learning_rate": 1.415527989393985e-05, | |
| "loss": 0.4268, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.167594936708861, | |
| "grad_norm": 0.2542217701973114, | |
| "learning_rate": 1.410102255819891e-05, | |
| "loss": 0.4101, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.171645569620253, | |
| "grad_norm": 0.2297688802305136, | |
| "learning_rate": 1.404681273201131e-05, | |
| "loss": 0.4169, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.1756962025316455, | |
| "grad_norm": 0.23208207745358939, | |
| "learning_rate": 1.399265085197556e-05, | |
| "loss": 0.4178, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.179746835443038, | |
| "grad_norm": 0.24198725359335324, | |
| "learning_rate": 1.393853735430398e-05, | |
| "loss": 0.4107, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.1837974683544306, | |
| "grad_norm": 0.23566692449585475, | |
| "learning_rate": 1.3884472674819246e-05, | |
| "loss": 0.4122, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.1878481012658226, | |
| "grad_norm": 0.23009057752998352, | |
| "learning_rate": 1.3830457248950864e-05, | |
| "loss": 0.4175, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.191898734177215, | |
| "grad_norm": 0.27165907041772225, | |
| "learning_rate": 1.377649151173163e-05, | |
| "loss": 0.4175, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.1959493670886077, | |
| "grad_norm": 0.23219126326594666, | |
| "learning_rate": 1.3722575897794181e-05, | |
| "loss": 0.4167, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "grad_norm": 0.308267007995278, | |
| "learning_rate": 1.3668710841367472e-05, | |
| "loss": 0.4146, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.2040506329113922, | |
| "grad_norm": 0.24362350765015298, | |
| "learning_rate": 1.361489677627324e-05, | |
| "loss": 0.4268, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.2081012658227848, | |
| "grad_norm": 0.25649431305766557, | |
| "learning_rate": 1.3561134135922585e-05, | |
| "loss": 0.4077, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.2121518987341773, | |
| "grad_norm": 0.23974743222644912, | |
| "learning_rate": 1.350742335331241e-05, | |
| "loss": 0.4233, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.21620253164557, | |
| "grad_norm": 0.24322276663098819, | |
| "learning_rate": 1.345376486102198e-05, | |
| "loss": 0.4246, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.220253164556962, | |
| "grad_norm": 0.2524623307729589, | |
| "learning_rate": 1.3400159091209414e-05, | |
| "loss": 0.4248, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.2243037974683544, | |
| "grad_norm": 0.2544289605549649, | |
| "learning_rate": 1.3346606475608216e-05, | |
| "loss": 0.4173, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.228354430379747, | |
| "grad_norm": 0.2517436273066714, | |
| "learning_rate": 1.3293107445523781e-05, | |
| "loss": 0.4302, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.2324050632911394, | |
| "grad_norm": 0.24437546900755755, | |
| "learning_rate": 1.3239662431829949e-05, | |
| "loss": 0.4149, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.2364556962025315, | |
| "grad_norm": 0.24606508495132434, | |
| "learning_rate": 1.3186271864965509e-05, | |
| "loss": 0.4172, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.240506329113924, | |
| "grad_norm": 0.26034544477073057, | |
| "learning_rate": 1.3132936174930756e-05, | |
| "loss": 0.4196, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.2445569620253165, | |
| "grad_norm": 0.24988231205698694, | |
| "learning_rate": 1.3079655791283995e-05, | |
| "loss": 0.4086, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.248607594936709, | |
| "grad_norm": 0.26776621437793613, | |
| "learning_rate": 1.3026431143138108e-05, | |
| "loss": 0.4193, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.252658227848101, | |
| "grad_norm": 0.2375731637015717, | |
| "learning_rate": 1.2973262659157114e-05, | |
| "loss": 0.4146, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.2567088607594936, | |
| "grad_norm": 0.244978924810613, | |
| "learning_rate": 1.2920150767552651e-05, | |
| "loss": 0.4199, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.260759493670886, | |
| "grad_norm": 0.25074854815687186, | |
| "learning_rate": 1.2867095896080607e-05, | |
| "loss": 0.4117, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.2648101265822786, | |
| "grad_norm": 0.2681728464688326, | |
| "learning_rate": 1.2814098472037612e-05, | |
| "loss": 0.4202, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.2688607594936707, | |
| "grad_norm": 0.2382962649747543, | |
| "learning_rate": 1.276115892225764e-05, | |
| "loss": 0.4216, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.2729113924050632, | |
| "grad_norm": 0.27795968521183745, | |
| "learning_rate": 1.2708277673108555e-05, | |
| "loss": 0.4275, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.2769620253164558, | |
| "grad_norm": 0.2357551035755549, | |
| "learning_rate": 1.2655455150488649e-05, | |
| "loss": 0.4091, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.2810126582278483, | |
| "grad_norm": 0.2589408902208313, | |
| "learning_rate": 1.2602691779823272e-05, | |
| "loss": 0.4243, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.2850632911392403, | |
| "grad_norm": 0.2290011345122997, | |
| "learning_rate": 1.2549987986061355e-05, | |
| "loss": 0.4176, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.289113924050633, | |
| "grad_norm": 0.2607625839963637, | |
| "learning_rate": 1.2497344193672005e-05, | |
| "loss": 0.403, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.2931645569620254, | |
| "grad_norm": 0.24882122318002547, | |
| "learning_rate": 1.2444760826641092e-05, | |
| "loss": 0.414, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.297215189873418, | |
| "grad_norm": 0.2651345018666523, | |
| "learning_rate": 1.2392238308467817e-05, | |
| "loss": 0.4173, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.30126582278481, | |
| "grad_norm": 0.23418704458099618, | |
| "learning_rate": 1.2339777062161326e-05, | |
| "loss": 0.4246, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.3053164556962025, | |
| "grad_norm": 0.2620879973047422, | |
| "learning_rate": 1.2287377510237293e-05, | |
| "loss": 0.4204, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.309367088607595, | |
| "grad_norm": 0.2256133547326787, | |
| "learning_rate": 1.2235040074714488e-05, | |
| "loss": 0.4191, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.3134177215189875, | |
| "grad_norm": 0.2488387438125186, | |
| "learning_rate": 1.2182765177111434e-05, | |
| "loss": 0.4208, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.3174683544303796, | |
| "grad_norm": 0.22881522051173686, | |
| "learning_rate": 1.213055323844297e-05, | |
| "loss": 0.4089, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.321518987341772, | |
| "grad_norm": 0.2492282772486322, | |
| "learning_rate": 1.2078404679216864e-05, | |
| "loss": 0.4232, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.3255696202531646, | |
| "grad_norm": 0.26899723993922114, | |
| "learning_rate": 1.2026319919430458e-05, | |
| "loss": 0.4112, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.329620253164557, | |
| "grad_norm": 0.2553620436787898, | |
| "learning_rate": 1.1974299378567227e-05, | |
| "loss": 0.416, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.333670886075949, | |
| "grad_norm": 0.24862664853191566, | |
| "learning_rate": 1.1922343475593462e-05, | |
| "loss": 0.4149, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.3377215189873417, | |
| "grad_norm": 0.2567726127178886, | |
| "learning_rate": 1.187045262895488e-05, | |
| "loss": 0.4174, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.3417721518987342, | |
| "grad_norm": 0.30776014500523113, | |
| "learning_rate": 1.1818627256573203e-05, | |
| "loss": 0.4024, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.3458227848101267, | |
| "grad_norm": 0.24246952412399594, | |
| "learning_rate": 1.1766867775842864e-05, | |
| "loss": 0.4135, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.349873417721519, | |
| "grad_norm": 0.28921455644413563, | |
| "learning_rate": 1.1715174603627615e-05, | |
| "loss": 0.4168, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.3539240506329113, | |
| "grad_norm": 0.27341121915144334, | |
| "learning_rate": 1.1663548156257147e-05, | |
| "loss": 0.4119, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.357974683544304, | |
| "grad_norm": 0.24157935925064114, | |
| "learning_rate": 1.161198884952377e-05, | |
| "loss": 0.412, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.3620253164556964, | |
| "grad_norm": 0.2682560402716377, | |
| "learning_rate": 1.1560497098679056e-05, | |
| "loss": 0.4239, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.3660759493670884, | |
| "grad_norm": 0.24965206450469038, | |
| "learning_rate": 1.1509073318430479e-05, | |
| "loss": 0.4132, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.370126582278481, | |
| "grad_norm": 0.25351031909033794, | |
| "learning_rate": 1.1457717922938116e-05, | |
| "loss": 0.4295, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.3741772151898735, | |
| "grad_norm": 0.26113421007043197, | |
| "learning_rate": 1.1406431325811233e-05, | |
| "loss": 0.4174, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.378227848101266, | |
| "grad_norm": 0.27981607823239624, | |
| "learning_rate": 1.135521394010506e-05, | |
| "loss": 0.4167, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.382278481012658, | |
| "grad_norm": 0.23846038254912133, | |
| "learning_rate": 1.1304066178317367e-05, | |
| "loss": 0.4214, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.3863291139240506, | |
| "grad_norm": 0.27233418305862495, | |
| "learning_rate": 1.1252988452385199e-05, | |
| "loss": 0.4083, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.390379746835443, | |
| "grad_norm": 0.2588421707282755, | |
| "learning_rate": 1.1201981173681536e-05, | |
| "loss": 0.4204, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.3944303797468356, | |
| "grad_norm": 0.2570394300812571, | |
| "learning_rate": 1.1151044753011991e-05, | |
| "loss": 0.4201, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.3984810126582277, | |
| "grad_norm": 0.24761949566787725, | |
| "learning_rate": 1.1100179600611491e-05, | |
| "loss": 0.4185, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.40253164556962, | |
| "grad_norm": 0.2486705406466546, | |
| "learning_rate": 1.1049386126140985e-05, | |
| "loss": 0.4127, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.4065822784810127, | |
| "grad_norm": 0.2644145764037628, | |
| "learning_rate": 1.0998664738684128e-05, | |
| "loss": 0.4075, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.410632911392405, | |
| "grad_norm": 0.2408390578931503, | |
| "learning_rate": 1.0948015846744e-05, | |
| "loss": 0.4164, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.4146835443037973, | |
| "grad_norm": 0.26164206829670067, | |
| "learning_rate": 1.0897439858239832e-05, | |
| "loss": 0.4188, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.41873417721519, | |
| "grad_norm": 0.24724214061227429, | |
| "learning_rate": 1.0846937180503652e-05, | |
| "loss": 0.4115, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.4227848101265823, | |
| "grad_norm": 0.25431948698305656, | |
| "learning_rate": 1.0796508220277117e-05, | |
| "loss": 0.4093, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.426835443037975, | |
| "grad_norm": 0.24655570889215117, | |
| "learning_rate": 1.0746153383708107e-05, | |
| "loss": 0.4084, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.430886075949367, | |
| "grad_norm": 0.22637941004332657, | |
| "learning_rate": 1.0695873076347579e-05, | |
| "loss": 0.4156, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.4349367088607594, | |
| "grad_norm": 0.25855325916706956, | |
| "learning_rate": 1.0645667703146205e-05, | |
| "loss": 0.411, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 3.438987341772152, | |
| "grad_norm": 0.23680441314171682, | |
| "learning_rate": 1.0595537668451161e-05, | |
| "loss": 0.4144, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 3.4430379746835444, | |
| "grad_norm": 0.288873723647931, | |
| "learning_rate": 1.0545483376002854e-05, | |
| "loss": 0.4198, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 3.4470886075949365, | |
| "grad_norm": 0.23915771006144626, | |
| "learning_rate": 1.0495505228931676e-05, | |
| "loss": 0.4098, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 3.451139240506329, | |
| "grad_norm": 0.2737761284337262, | |
| "learning_rate": 1.044560362975474e-05, | |
| "loss": 0.4105, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 3.4551898734177215, | |
| "grad_norm": 0.26020823797270015, | |
| "learning_rate": 1.0395778980372695e-05, | |
| "loss": 0.4182, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 3.459240506329114, | |
| "grad_norm": 0.22579604999796674, | |
| "learning_rate": 1.0346031682066381e-05, | |
| "loss": 0.4078, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 3.463291139240506, | |
| "grad_norm": 0.23397779978133962, | |
| "learning_rate": 1.0296362135493724e-05, | |
| "loss": 0.4219, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 3.4673417721518986, | |
| "grad_norm": 0.22284483884897047, | |
| "learning_rate": 1.0246770740686422e-05, | |
| "loss": 0.4122, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 3.471392405063291, | |
| "grad_norm": 0.23308368128194587, | |
| "learning_rate": 1.0197257897046743e-05, | |
| "loss": 0.4144, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 3.4754430379746837, | |
| "grad_norm": 0.22348651516165416, | |
| "learning_rate": 1.014782400334433e-05, | |
| "loss": 0.4064, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 3.479493670886076, | |
| "grad_norm": 0.22448152832942533, | |
| "learning_rate": 1.009846945771296e-05, | |
| "loss": 0.4107, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 3.4835443037974683, | |
| "grad_norm": 0.23592451253463717, | |
| "learning_rate": 1.0049194657647363e-05, | |
| "loss": 0.4198, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 3.487594936708861, | |
| "grad_norm": 0.2837071401232295, | |
| "learning_rate": 1.0000000000000006e-05, | |
| "loss": 0.4213, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 3.4916455696202533, | |
| "grad_norm": 0.22658549406727618, | |
| "learning_rate": 9.950885880977891e-06, | |
| "loss": 0.4186, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 3.4956962025316454, | |
| "grad_norm": 0.22800204156713488, | |
| "learning_rate": 9.901852696139382e-06, | |
| "loss": 0.431, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 3.499746835443038, | |
| "grad_norm": 0.24901185418217667, | |
| "learning_rate": 9.852900840391027e-06, | |
| "loss": 0.4262, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 3.5037974683544304, | |
| "grad_norm": 0.23894269704674706, | |
| "learning_rate": 9.804030707984313e-06, | |
| "loss": 0.4086, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 3.507848101265823, | |
| "grad_norm": 0.23708588509530604, | |
| "learning_rate": 9.755242692512599e-06, | |
| "loss": 0.4116, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 3.5118987341772154, | |
| "grad_norm": 0.24528497674057603, | |
| "learning_rate": 9.70653718690782e-06, | |
| "loss": 0.4087, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 3.5159493670886075, | |
| "grad_norm": 0.22894842494049852, | |
| "learning_rate": 9.657914583437454e-06, | |
| "loss": 0.4115, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "grad_norm": 0.25380579728332203, | |
| "learning_rate": 9.609375273701246e-06, | |
| "loss": 0.4268, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 3.5240506329113925, | |
| "grad_norm": 0.23316294992730854, | |
| "learning_rate": 9.560919648628133e-06, | |
| "loss": 0.4128, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 3.5281012658227846, | |
| "grad_norm": 0.23580209724821408, | |
| "learning_rate": 9.512548098473047e-06, | |
| "loss": 0.4155, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 3.532151898734177, | |
| "grad_norm": 0.2540105571637628, | |
| "learning_rate": 9.464261012813825e-06, | |
| "loss": 0.4158, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 3.5362025316455696, | |
| "grad_norm": 0.22524760223632734, | |
| "learning_rate": 9.416058780547987e-06, | |
| "loss": 0.4146, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 3.540253164556962, | |
| "grad_norm": 0.2277901771775277, | |
| "learning_rate": 9.367941789889714e-06, | |
| "loss": 0.415, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 3.5443037974683547, | |
| "grad_norm": 0.22577518782390343, | |
| "learning_rate": 9.319910428366607e-06, | |
| "loss": 0.4217, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 3.5483544303797467, | |
| "grad_norm": 0.2175416082327203, | |
| "learning_rate": 9.271965082816667e-06, | |
| "loss": 0.4273, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 3.5524050632911393, | |
| "grad_norm": 0.23317608598560186, | |
| "learning_rate": 9.224106139385111e-06, | |
| "loss": 0.4221, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 3.5564556962025318, | |
| "grad_norm": 0.24601282797386567, | |
| "learning_rate": 9.176333983521291e-06, | |
| "loss": 0.4206, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 3.560506329113924, | |
| "grad_norm": 0.24308692475906507, | |
| "learning_rate": 9.12864899997558e-06, | |
| "loss": 0.4222, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 3.5645569620253164, | |
| "grad_norm": 0.22926730360327544, | |
| "learning_rate": 9.08105157279628e-06, | |
| "loss": 0.4159, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 3.568607594936709, | |
| "grad_norm": 0.26225859555362585, | |
| "learning_rate": 9.03354208532653e-06, | |
| "loss": 0.4199, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 3.5726582278481014, | |
| "grad_norm": 0.23241415479050753, | |
| "learning_rate": 8.986120920201205e-06, | |
| "loss": 0.4228, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 3.576708860759494, | |
| "grad_norm": 0.23202425826349732, | |
| "learning_rate": 8.938788459343852e-06, | |
| "loss": 0.4156, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 3.580759493670886, | |
| "grad_norm": 0.24328469880457745, | |
| "learning_rate": 8.8915450839636e-06, | |
| "loss": 0.4075, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 3.5848101265822785, | |
| "grad_norm": 0.2349804396314518, | |
| "learning_rate": 8.844391174552116e-06, | |
| "loss": 0.4237, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 3.588860759493671, | |
| "grad_norm": 0.2527982014761089, | |
| "learning_rate": 8.797327110880479e-06, | |
| "loss": 0.4126, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 3.592911392405063, | |
| "grad_norm": 0.2471290002240711, | |
| "learning_rate": 8.750353271996206e-06, | |
| "loss": 0.4024, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 3.5969620253164556, | |
| "grad_norm": 0.23516849230653505, | |
| "learning_rate": 8.703470036220132e-06, | |
| "loss": 0.4199, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 3.601012658227848, | |
| "grad_norm": 0.22477549350550297, | |
| "learning_rate": 8.656677781143394e-06, | |
| "loss": 0.4151, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 3.6050632911392406, | |
| "grad_norm": 0.2715290853641857, | |
| "learning_rate": 8.609976883624377e-06, | |
| "loss": 0.4217, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 3.609113924050633, | |
| "grad_norm": 0.2439671739231745, | |
| "learning_rate": 8.563367719785698e-06, | |
| "loss": 0.4179, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 3.613164556962025, | |
| "grad_norm": 0.22435655523306888, | |
| "learning_rate": 8.516850665011138e-06, | |
| "loss": 0.4014, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 3.6172151898734177, | |
| "grad_norm": 0.2649896697483519, | |
| "learning_rate": 8.47042609394269e-06, | |
| "loss": 0.4264, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 3.6212658227848102, | |
| "grad_norm": 0.2520517710281064, | |
| "learning_rate": 8.424094380477432e-06, | |
| "loss": 0.4207, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 3.6253164556962023, | |
| "grad_norm": 0.23995721739748685, | |
| "learning_rate": 8.37785589776465e-06, | |
| "loss": 0.4331, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 3.629367088607595, | |
| "grad_norm": 0.22714763458819415, | |
| "learning_rate": 8.331711018202694e-06, | |
| "loss": 0.4191, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 3.6334177215189873, | |
| "grad_norm": 0.2213990563435402, | |
| "learning_rate": 8.285660113436104e-06, | |
| "loss": 0.4148, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 3.63746835443038, | |
| "grad_norm": 0.25045323986475415, | |
| "learning_rate": 8.239703554352527e-06, | |
| "loss": 0.4128, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 3.6415189873417724, | |
| "grad_norm": 0.209656478652168, | |
| "learning_rate": 8.193841711079775e-06, | |
| "loss": 0.4149, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 3.6455696202531644, | |
| "grad_norm": 0.2163158110944908, | |
| "learning_rate": 8.148074952982828e-06, | |
| "loss": 0.4013, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.649620253164557, | |
| "grad_norm": 0.23295745873847007, | |
| "learning_rate": 8.102403648660859e-06, | |
| "loss": 0.4189, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 3.6536708860759495, | |
| "grad_norm": 0.23137395377970035, | |
| "learning_rate": 8.056828165944282e-06, | |
| "loss": 0.4149, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 3.6577215189873415, | |
| "grad_norm": 0.22401234293706565, | |
| "learning_rate": 8.011348871891762e-06, | |
| "loss": 0.4211, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 3.661772151898734, | |
| "grad_norm": 0.23794233918064692, | |
| "learning_rate": 7.965966132787287e-06, | |
| "loss": 0.4218, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 3.6658227848101266, | |
| "grad_norm": 0.21841298535534123, | |
| "learning_rate": 7.920680314137189e-06, | |
| "loss": 0.4144, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 3.669873417721519, | |
| "grad_norm": 0.23031236227950844, | |
| "learning_rate": 7.875491780667246e-06, | |
| "loss": 0.423, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 3.6739240506329116, | |
| "grad_norm": 0.21997196834727709, | |
| "learning_rate": 7.830400896319667e-06, | |
| "loss": 0.4189, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 3.6779746835443037, | |
| "grad_norm": 0.2193877823817885, | |
| "learning_rate": 7.785408024250259e-06, | |
| "loss": 0.4216, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 3.682025316455696, | |
| "grad_norm": 0.22784323033308543, | |
| "learning_rate": 7.74051352682542e-06, | |
| "loss": 0.41, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 3.6860759493670887, | |
| "grad_norm": 0.2110364447805807, | |
| "learning_rate": 7.695717765619257e-06, | |
| "loss": 0.4191, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 3.690126582278481, | |
| "grad_norm": 0.21202241707789757, | |
| "learning_rate": 7.651021101410673e-06, | |
| "loss": 0.4238, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 3.6941772151898733, | |
| "grad_norm": 0.22357752887348697, | |
| "learning_rate": 7.606423894180464e-06, | |
| "loss": 0.4124, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 3.698227848101266, | |
| "grad_norm": 0.21063077489491633, | |
| "learning_rate": 7.56192650310839e-06, | |
| "loss": 0.4179, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 3.7022784810126583, | |
| "grad_norm": 0.23903542018274826, | |
| "learning_rate": 7.517529286570349e-06, | |
| "loss": 0.412, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 3.706329113924051, | |
| "grad_norm": 0.21127505329479815, | |
| "learning_rate": 7.473232602135387e-06, | |
| "loss": 0.4161, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 3.710379746835443, | |
| "grad_norm": 0.2215793412064054, | |
| "learning_rate": 7.429036806562935e-06, | |
| "loss": 0.4059, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 3.7144303797468354, | |
| "grad_norm": 0.21759408966787905, | |
| "learning_rate": 7.3849422557998455e-06, | |
| "loss": 0.4137, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 3.718481012658228, | |
| "grad_norm": 0.20943276647208486, | |
| "learning_rate": 7.340949304977567e-06, | |
| "loss": 0.4161, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 3.72253164556962, | |
| "grad_norm": 0.22884365488661843, | |
| "learning_rate": 7.297058308409282e-06, | |
| "loss": 0.4289, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 3.7265822784810125, | |
| "grad_norm": 0.23885052454790517, | |
| "learning_rate": 7.25326961958704e-06, | |
| "loss": 0.4103, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 3.730632911392405, | |
| "grad_norm": 0.21266592073714152, | |
| "learning_rate": 7.209583591178921e-06, | |
| "loss": 0.4114, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 3.7346835443037976, | |
| "grad_norm": 0.23470405797765628, | |
| "learning_rate": 7.1660005750261925e-06, | |
| "loss": 0.4324, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 3.73873417721519, | |
| "grad_norm": 0.25058079538140254, | |
| "learning_rate": 7.1225209221404765e-06, | |
| "loss": 0.4111, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 3.742784810126582, | |
| "grad_norm": 0.222732028345954, | |
| "learning_rate": 7.079144982700909e-06, | |
| "loss": 0.4148, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 3.7468354430379747, | |
| "grad_norm": 0.27007402715285894, | |
| "learning_rate": 7.0358731060513695e-06, | |
| "loss": 0.3989, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 3.750886075949367, | |
| "grad_norm": 0.23353294458660478, | |
| "learning_rate": 6.99270564069757e-06, | |
| "loss": 0.4173, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 3.7549367088607593, | |
| "grad_norm": 0.22159547255301118, | |
| "learning_rate": 6.949642934304375e-06, | |
| "loss": 0.4028, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 3.7589873417721518, | |
| "grad_norm": 0.22828919328155972, | |
| "learning_rate": 6.906685333692871e-06, | |
| "loss": 0.4247, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 3.7630379746835443, | |
| "grad_norm": 0.23118277604580306, | |
| "learning_rate": 6.86383318483769e-06, | |
| "loss": 0.4149, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 3.767088607594937, | |
| "grad_norm": 0.21273100254565397, | |
| "learning_rate": 6.821086832864139e-06, | |
| "loss": 0.4224, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 3.7711392405063293, | |
| "grad_norm": 0.23219951763411456, | |
| "learning_rate": 6.77844662204546e-06, | |
| "loss": 0.4084, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 3.7751898734177214, | |
| "grad_norm": 0.226925716981963, | |
| "learning_rate": 6.7359128958000455e-06, | |
| "loss": 0.4027, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 3.779240506329114, | |
| "grad_norm": 0.21473361282129558, | |
| "learning_rate": 6.693485996688695e-06, | |
| "loss": 0.4106, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 3.7832911392405064, | |
| "grad_norm": 0.23236362423744558, | |
| "learning_rate": 6.651166266411801e-06, | |
| "loss": 0.3986, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 3.7873417721518985, | |
| "grad_norm": 0.23269266573674577, | |
| "learning_rate": 6.6089540458066725e-06, | |
| "loss": 0.4042, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 3.791392405063291, | |
| "grad_norm": 0.2315704181853091, | |
| "learning_rate": 6.566849674844711e-06, | |
| "loss": 0.414, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 3.7954430379746835, | |
| "grad_norm": 0.2214571438442625, | |
| "learning_rate": 6.524853492628747e-06, | |
| "loss": 0.4102, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 3.799493670886076, | |
| "grad_norm": 0.24337763709699473, | |
| "learning_rate": 6.4829658373902536e-06, | |
| "loss": 0.4144, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.8035443037974686, | |
| "grad_norm": 0.2396319997263314, | |
| "learning_rate": 6.441187046486648e-06, | |
| "loss": 0.4173, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.8075949367088606, | |
| "grad_norm": 0.23684991729507926, | |
| "learning_rate": 6.399517456398567e-06, | |
| "loss": 0.4049, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 3.811645569620253, | |
| "grad_norm": 0.23968732498055503, | |
| "learning_rate": 6.357957402727164e-06, | |
| "loss": 0.4203, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 3.8156962025316457, | |
| "grad_norm": 0.24061892599113532, | |
| "learning_rate": 6.316507220191395e-06, | |
| "loss": 0.4185, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 3.8197468354430377, | |
| "grad_norm": 0.2251851377384505, | |
| "learning_rate": 6.275167242625331e-06, | |
| "loss": 0.4184, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 3.8237974683544302, | |
| "grad_norm": 0.20821558885180502, | |
| "learning_rate": 6.233937802975471e-06, | |
| "loss": 0.4165, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 3.8278481012658228, | |
| "grad_norm": 0.23944763525235213, | |
| "learning_rate": 6.192819233298046e-06, | |
| "loss": 0.4164, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 3.8318987341772153, | |
| "grad_norm": 0.2245392447971014, | |
| "learning_rate": 6.151811864756383e-06, | |
| "loss": 0.4101, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 3.835949367088608, | |
| "grad_norm": 0.23192260049069344, | |
| "learning_rate": 6.1109160276181655e-06, | |
| "loss": 0.4197, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "grad_norm": 0.21809494368236756, | |
| "learning_rate": 6.070132051252868e-06, | |
| "loss": 0.4218, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 3.8440506329113924, | |
| "grad_norm": 0.22848808394465095, | |
| "learning_rate": 6.0294602641290034e-06, | |
| "loss": 0.4052, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 3.848101265822785, | |
| "grad_norm": 0.20138747488615916, | |
| "learning_rate": 5.988900993811575e-06, | |
| "loss": 0.4271, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 3.852151898734177, | |
| "grad_norm": 0.22893895094104608, | |
| "learning_rate": 5.948454566959363e-06, | |
| "loss": 0.4012, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 3.8562025316455695, | |
| "grad_norm": 0.23675805788204565, | |
| "learning_rate": 5.908121309322328e-06, | |
| "loss": 0.4234, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 3.860253164556962, | |
| "grad_norm": 0.2175216033208925, | |
| "learning_rate": 5.867901545738976e-06, | |
| "loss": 0.406, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 3.8643037974683545, | |
| "grad_norm": 0.22309328743752352, | |
| "learning_rate": 5.827795600133774e-06, | |
| "loss": 0.4112, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 3.868354430379747, | |
| "grad_norm": 0.2325710304434826, | |
| "learning_rate": 5.787803795514466e-06, | |
| "loss": 0.4157, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 3.872405063291139, | |
| "grad_norm": 0.22639122172184742, | |
| "learning_rate": 5.747926453969576e-06, | |
| "loss": 0.4011, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 3.8764556962025316, | |
| "grad_norm": 0.207762678314419, | |
| "learning_rate": 5.708163896665708e-06, | |
| "loss": 0.4177, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 3.880506329113924, | |
| "grad_norm": 0.20941446635391753, | |
| "learning_rate": 5.668516443845047e-06, | |
| "loss": 0.4089, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 3.884556962025316, | |
| "grad_norm": 0.2016024119168464, | |
| "learning_rate": 5.6289844148227225e-06, | |
| "loss": 0.4032, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 3.8886075949367087, | |
| "grad_norm": 0.21106573967197506, | |
| "learning_rate": 5.5895681279842615e-06, | |
| "loss": 0.4229, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 3.8926582278481012, | |
| "grad_norm": 0.20938293844352165, | |
| "learning_rate": 5.550267900783019e-06, | |
| "loss": 0.4102, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 3.8967088607594937, | |
| "grad_norm": 0.2054727514311623, | |
| "learning_rate": 5.511084049737623e-06, | |
| "loss": 0.4083, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 3.9007594936708863, | |
| "grad_norm": 0.21082234034973055, | |
| "learning_rate": 5.4720168904294215e-06, | |
| "loss": 0.4025, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 3.9048101265822783, | |
| "grad_norm": 0.21828389716217125, | |
| "learning_rate": 5.433066737499948e-06, | |
| "loss": 0.4163, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 3.908860759493671, | |
| "grad_norm": 0.21741859542928438, | |
| "learning_rate": 5.394233904648376e-06, | |
| "loss": 0.4097, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 3.9129113924050634, | |
| "grad_norm": 0.2036628056226865, | |
| "learning_rate": 5.355518704628997e-06, | |
| "loss": 0.4149, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 3.9169620253164554, | |
| "grad_norm": 0.22609446431050897, | |
| "learning_rate": 5.316921449248731e-06, | |
| "loss": 0.416, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 3.921012658227848, | |
| "grad_norm": 0.19809165048987415, | |
| "learning_rate": 5.278442449364538e-06, | |
| "loss": 0.4258, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 3.9250632911392405, | |
| "grad_norm": 0.20903757304729806, | |
| "learning_rate": 5.240082014881016e-06, | |
| "loss": 0.4089, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 3.929113924050633, | |
| "grad_norm": 0.2177478018514132, | |
| "learning_rate": 5.201840454747822e-06, | |
| "loss": 0.3946, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 3.9331645569620255, | |
| "grad_norm": 0.2063122063537382, | |
| "learning_rate": 5.163718076957223e-06, | |
| "loss": 0.4179, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 3.9372151898734176, | |
| "grad_norm": 0.22167598918014525, | |
| "learning_rate": 5.125715188541609e-06, | |
| "loss": 0.4272, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 3.94126582278481, | |
| "grad_norm": 0.2059865007268975, | |
| "learning_rate": 5.087832095571021e-06, | |
| "loss": 0.4082, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 3.9453164556962026, | |
| "grad_norm": 0.2024651577652389, | |
| "learning_rate": 5.0500691031506766e-06, | |
| "loss": 0.4342, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 3.9493670886075947, | |
| "grad_norm": 0.2253317018411249, | |
| "learning_rate": 5.01242651541854e-06, | |
| "loss": 0.4169, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 3.953417721518987, | |
| "grad_norm": 0.21989367340190785, | |
| "learning_rate": 4.974904635542815e-06, | |
| "loss": 0.4051, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 3.9574683544303797, | |
| "grad_norm": 0.2099645783074133, | |
| "learning_rate": 4.937503765719582e-06, | |
| "loss": 0.4068, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 3.961518987341772, | |
| "grad_norm": 0.21267115678868861, | |
| "learning_rate": 4.900224207170299e-06, | |
| "loss": 0.4001, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 3.9655696202531647, | |
| "grad_norm": 0.22541040516362024, | |
| "learning_rate": 4.8630662601394065e-06, | |
| "loss": 0.4188, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 3.969620253164557, | |
| "grad_norm": 0.21418806896324039, | |
| "learning_rate": 4.8260302238918995e-06, | |
| "loss": 0.4287, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 3.9736708860759493, | |
| "grad_norm": 0.20741973897264557, | |
| "learning_rate": 4.789116396710924e-06, | |
| "loss": 0.4227, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 3.977721518987342, | |
| "grad_norm": 0.2093470804808695, | |
| "learning_rate": 4.752325075895368e-06, | |
| "loss": 0.4155, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 3.981772151898734, | |
| "grad_norm": 0.22312326190880916, | |
| "learning_rate": 4.715656557757473e-06, | |
| "loss": 0.3976, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 3.9858227848101264, | |
| "grad_norm": 0.2199180688840271, | |
| "learning_rate": 4.679111137620442e-06, | |
| "loss": 0.4216, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 3.989873417721519, | |
| "grad_norm": 0.1995684398388577, | |
| "learning_rate": 4.6426891098160585e-06, | |
| "loss": 0.4327, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 3.9939240506329114, | |
| "grad_norm": 0.2064085472983062, | |
| "learning_rate": 4.6063907676823474e-06, | |
| "loss": 0.4051, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 3.997974683544304, | |
| "grad_norm": 0.22533187015405287, | |
| "learning_rate": 4.570216403561141e-06, | |
| "loss": 0.4035, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 4.002025316455696, | |
| "grad_norm": 0.23383029925732315, | |
| "learning_rate": 4.534166308795815e-06, | |
| "loss": 0.3954, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 4.006075949367089, | |
| "grad_norm": 0.38174957203950993, | |
| "learning_rate": 4.498240773728859e-06, | |
| "loss": 0.3755, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 4.010126582278481, | |
| "grad_norm": 0.25485253160493926, | |
| "learning_rate": 4.462440087699609e-06, | |
| "loss": 0.3655, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 4.014177215189873, | |
| "grad_norm": 0.3354595309853449, | |
| "learning_rate": 4.426764539041861e-06, | |
| "loss": 0.378, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 4.018227848101266, | |
| "grad_norm": 0.386395627276107, | |
| "learning_rate": 4.391214415081582e-06, | |
| "loss": 0.3667, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 4.022278481012658, | |
| "grad_norm": 0.30723388314085925, | |
| "learning_rate": 4.355790002134579e-06, | |
| "loss": 0.3866, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 4.02632911392405, | |
| "grad_norm": 0.3085180578178756, | |
| "learning_rate": 4.320491585504207e-06, | |
| "loss": 0.3674, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 4.030379746835443, | |
| "grad_norm": 0.33714406822492904, | |
| "learning_rate": 4.2853194494790615e-06, | |
| "loss": 0.3583, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 4.034430379746835, | |
| "grad_norm": 0.299137326578124, | |
| "learning_rate": 4.250273877330691e-06, | |
| "loss": 0.3783, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 4.038481012658228, | |
| "grad_norm": 0.22329026458726728, | |
| "learning_rate": 4.215355151311313e-06, | |
| "loss": 0.3769, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 4.04253164556962, | |
| "grad_norm": 0.26793845532691984, | |
| "learning_rate": 4.180563552651542e-06, | |
| "loss": 0.3473, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 4.046582278481012, | |
| "grad_norm": 0.33626885054521666, | |
| "learning_rate": 4.145899361558147e-06, | |
| "loss": 0.3757, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 4.050632911392405, | |
| "grad_norm": 0.28426618636167433, | |
| "learning_rate": 4.111362857211738e-06, | |
| "loss": 0.3695, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 4.054683544303797, | |
| "grad_norm": 0.22195107257668395, | |
| "learning_rate": 4.076954317764592e-06, | |
| "loss": 0.3649, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 4.0587341772151895, | |
| "grad_norm": 0.26785142882777535, | |
| "learning_rate": 4.042674020338335e-06, | |
| "loss": 0.3654, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 4.062784810126582, | |
| "grad_norm": 0.2625421227461147, | |
| "learning_rate": 4.0085222410217835e-06, | |
| "loss": 0.3709, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 4.0668354430379745, | |
| "grad_norm": 0.2631886694870951, | |
| "learning_rate": 3.974499254868674e-06, | |
| "loss": 0.3656, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 4.0708860759493675, | |
| "grad_norm": 0.20826551800005616, | |
| "learning_rate": 3.940605335895451e-06, | |
| "loss": 0.3828, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 4.0749367088607595, | |
| "grad_norm": 0.2684905782015678, | |
| "learning_rate": 3.90684075707908e-06, | |
| "loss": 0.3801, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 4.078987341772152, | |
| "grad_norm": 0.2543442983997565, | |
| "learning_rate": 3.8732057903548505e-06, | |
| "loss": 0.365, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 4.083037974683545, | |
| "grad_norm": 0.2520058582774448, | |
| "learning_rate": 3.8397007066141375e-06, | |
| "loss": 0.3665, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 4.087088607594937, | |
| "grad_norm": 0.21152238316758754, | |
| "learning_rate": 3.806325775702304e-06, | |
| "loss": 0.3846, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 4.091139240506329, | |
| "grad_norm": 0.24916557840095122, | |
| "learning_rate": 3.773081266416434e-06, | |
| "loss": 0.3545, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 4.095189873417722, | |
| "grad_norm": 0.24467551009361926, | |
| "learning_rate": 3.739967446503245e-06, | |
| "loss": 0.3688, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 4.099240506329114, | |
| "grad_norm": 0.22433643051439828, | |
| "learning_rate": 3.706984582656894e-06, | |
| "loss": 0.3727, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 4.103291139240507, | |
| "grad_norm": 0.22135742282350201, | |
| "learning_rate": 3.6741329405168237e-06, | |
| "loss": 0.3546, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 4.107341772151899, | |
| "grad_norm": 0.23302536731241344, | |
| "learning_rate": 3.641412784665648e-06, | |
| "loss": 0.3627, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 4.111392405063291, | |
| "grad_norm": 0.23825820469848602, | |
| "learning_rate": 3.608824378627005e-06, | |
| "loss": 0.3908, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 4.115443037974684, | |
| "grad_norm": 0.23520368025775154, | |
| "learning_rate": 3.5763679848634337e-06, | |
| "loss": 0.3715, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 4.119493670886076, | |
| "grad_norm": 0.21767111507708836, | |
| "learning_rate": 3.544043864774269e-06, | |
| "loss": 0.3869, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 4.123544303797468, | |
| "grad_norm": 0.21089851143911936, | |
| "learning_rate": 3.5118522786935282e-06, | |
| "loss": 0.3792, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 4.127594936708861, | |
| "grad_norm": 0.251215748387612, | |
| "learning_rate": 3.479793485887819e-06, | |
| "loss": 0.3734, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 4.131645569620253, | |
| "grad_norm": 0.21834712244561058, | |
| "learning_rate": 3.4478677445542653e-06, | |
| "loss": 0.3703, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 4.135696202531646, | |
| "grad_norm": 0.2279142889503118, | |
| "learning_rate": 3.4160753118183767e-06, | |
| "loss": 0.3771, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 4.139746835443038, | |
| "grad_norm": 0.21345342388834926, | |
| "learning_rate": 3.3844164437320527e-06, | |
| "loss": 0.3659, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 4.14379746835443, | |
| "grad_norm": 0.2309406083196365, | |
| "learning_rate": 3.3528913952714558e-06, | |
| "loss": 0.3594, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 4.147848101265823, | |
| "grad_norm": 0.23063299884765595, | |
| "learning_rate": 3.321500420335e-06, | |
| "loss": 0.3664, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 4.151898734177215, | |
| "grad_norm": 0.21742164750394125, | |
| "learning_rate": 3.290243771741275e-06, | |
| "loss": 0.3824, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 4.155949367088607, | |
| "grad_norm": 0.2223866988319986, | |
| "learning_rate": 3.2591217012270325e-06, | |
| "loss": 0.3582, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "grad_norm": 0.21280159557629047, | |
| "learning_rate": 3.228134459445149e-06, | |
| "loss": 0.3643, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 4.164050632911392, | |
| "grad_norm": 0.20637562932794526, | |
| "learning_rate": 3.1972822959626205e-06, | |
| "loss": 0.3793, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 4.168101265822785, | |
| "grad_norm": 0.21897310073601947, | |
| "learning_rate": 3.166565459258513e-06, | |
| "loss": 0.3779, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 4.172151898734177, | |
| "grad_norm": 0.21302999454045682, | |
| "learning_rate": 3.1359841967220193e-06, | |
| "loss": 0.3869, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 4.176202531645569, | |
| "grad_norm": 0.21171192395807253, | |
| "learning_rate": 3.105538754650419e-06, | |
| "loss": 0.3765, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 4.180253164556962, | |
| "grad_norm": 0.20050435107605363, | |
| "learning_rate": 3.07522937824712e-06, | |
| "loss": 0.3782, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 4.184303797468354, | |
| "grad_norm": 0.21614543714119977, | |
| "learning_rate": 3.0450563116196697e-06, | |
| "loss": 0.3645, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 4.188354430379746, | |
| "grad_norm": 0.21162064917406376, | |
| "learning_rate": 3.0150197977778008e-06, | |
| "loss": 0.3764, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 4.192405063291139, | |
| "grad_norm": 0.23302613588742882, | |
| "learning_rate": 2.985120078631465e-06, | |
| "loss": 0.3782, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 4.1964556962025314, | |
| "grad_norm": 0.20512339617170444, | |
| "learning_rate": 2.9553573949888893e-06, | |
| "loss": 0.3724, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 4.200506329113924, | |
| "grad_norm": 0.2059723565213932, | |
| "learning_rate": 2.9257319865546384e-06, | |
| "loss": 0.3688, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 4.2045569620253165, | |
| "grad_norm": 0.21614949937773495, | |
| "learning_rate": 2.896244091927678e-06, | |
| "loss": 0.378, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 4.2086075949367086, | |
| "grad_norm": 0.209332276856673, | |
| "learning_rate": 2.8668939485994584e-06, | |
| "loss": 0.3659, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 4.2126582278481015, | |
| "grad_norm": 0.21178681908852773, | |
| "learning_rate": 2.837681792951994e-06, | |
| "loss": 0.3534, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 4.216708860759494, | |
| "grad_norm": 0.2070364856152208, | |
| "learning_rate": 2.808607860255981e-06, | |
| "loss": 0.3796, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 4.220759493670886, | |
| "grad_norm": 0.20009534086523942, | |
| "learning_rate": 2.7796723846688634e-06, | |
| "loss": 0.3741, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 4.224810126582279, | |
| "grad_norm": 0.21253796738678016, | |
| "learning_rate": 2.7508755992329937e-06, | |
| "loss": 0.3694, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 4.228860759493671, | |
| "grad_norm": 0.21097440012940472, | |
| "learning_rate": 2.722217735873718e-06, | |
| "loss": 0.3655, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 4.232911392405064, | |
| "grad_norm": 0.20788182132024463, | |
| "learning_rate": 2.6936990253975315e-06, | |
| "loss": 0.3645, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 4.236962025316456, | |
| "grad_norm": 0.2047893388293901, | |
| "learning_rate": 2.665319697490205e-06, | |
| "loss": 0.3523, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 4.241012658227848, | |
| "grad_norm": 0.20799250426352872, | |
| "learning_rate": 2.637079980714945e-06, | |
| "loss": 0.3669, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 4.245063291139241, | |
| "grad_norm": 0.21707979113687223, | |
| "learning_rate": 2.6089801025105453e-06, | |
| "loss": 0.3706, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 4.249113924050633, | |
| "grad_norm": 0.214981195790325, | |
| "learning_rate": 2.581020289189571e-06, | |
| "loss": 0.3607, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 4.253164556962025, | |
| "grad_norm": 0.1990070403781358, | |
| "learning_rate": 2.553200765936501e-06, | |
| "loss": 0.3687, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 4.257215189873418, | |
| "grad_norm": 0.20461338858838066, | |
| "learning_rate": 2.525521756805962e-06, | |
| "loss": 0.3728, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 4.26126582278481, | |
| "grad_norm": 0.20534396049047576, | |
| "learning_rate": 2.497983484720885e-06, | |
| "loss": 0.3668, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 4.265316455696203, | |
| "grad_norm": 0.2018613229566384, | |
| "learning_rate": 2.470586171470728e-06, | |
| "loss": 0.3753, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 4.269367088607595, | |
| "grad_norm": 0.20552579834127008, | |
| "learning_rate": 2.4433300377096836e-06, | |
| "loss": 0.3704, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 4.273417721518987, | |
| "grad_norm": 0.19970645706925913, | |
| "learning_rate": 2.4162153029549073e-06, | |
| "loss": 0.3718, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 4.27746835443038, | |
| "grad_norm": 0.20026505318398374, | |
| "learning_rate": 2.3892421855847458e-06, | |
| "loss": 0.3836, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 4.281518987341772, | |
| "grad_norm": 0.21027219020513316, | |
| "learning_rate": 2.362410902836978e-06, | |
| "loss": 0.3596, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 4.285569620253165, | |
| "grad_norm": 0.2118451262746592, | |
| "learning_rate": 2.3357216708070653e-06, | |
| "loss": 0.3697, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 4.289620253164557, | |
| "grad_norm": 0.20233883361564772, | |
| "learning_rate": 2.309174704446411e-06, | |
| "loss": 0.3667, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 4.293670886075949, | |
| "grad_norm": 0.20270637402517822, | |
| "learning_rate": 2.2827702175606437e-06, | |
| "loss": 0.3597, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 4.297721518987342, | |
| "grad_norm": 0.20298746828581632, | |
| "learning_rate": 2.256508422807855e-06, | |
| "loss": 0.3667, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 4.301772151898734, | |
| "grad_norm": 0.20458200992114323, | |
| "learning_rate": 2.230389531696946e-06, | |
| "loss": 0.3651, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 4.305822784810126, | |
| "grad_norm": 0.20959424136592025, | |
| "learning_rate": 2.204413754585857e-06, | |
| "loss": 0.3699, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 4.309873417721519, | |
| "grad_norm": 0.2128210058431168, | |
| "learning_rate": 2.1785813006799406e-06, | |
| "loss": 0.3706, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 4.313924050632911, | |
| "grad_norm": 0.23248604856307095, | |
| "learning_rate": 2.1528923780302224e-06, | |
| "loss": 0.3705, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 4.317974683544303, | |
| "grad_norm": 0.20632093360575463, | |
| "learning_rate": 2.127347193531757e-06, | |
| "loss": 0.3735, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 4.322025316455696, | |
| "grad_norm": 0.20662097888350422, | |
| "learning_rate": 2.101945952921942e-06, | |
| "loss": 0.3685, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 4.326075949367088, | |
| "grad_norm": 0.20833808710608634, | |
| "learning_rate": 2.0766888607788906e-06, | |
| "loss": 0.3731, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 4.330126582278481, | |
| "grad_norm": 0.20278302919591829, | |
| "learning_rate": 2.0515761205197337e-06, | |
| "loss": 0.3577, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 4.334177215189873, | |
| "grad_norm": 0.21837350899250932, | |
| "learning_rate": 2.0266079343990453e-06, | |
| "loss": 0.3641, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 4.3382278481012655, | |
| "grad_norm": 0.21626643769958315, | |
| "learning_rate": 2.0017845035071494e-06, | |
| "loss": 0.3594, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 4.3422784810126585, | |
| "grad_norm": 0.20503880082099307, | |
| "learning_rate": 1.9771060277685537e-06, | |
| "loss": 0.3718, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 4.3463291139240505, | |
| "grad_norm": 0.20874805569291383, | |
| "learning_rate": 1.95257270594031e-06, | |
| "loss": 0.3778, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 4.3503797468354435, | |
| "grad_norm": 0.21206102902433097, | |
| "learning_rate": 1.9281847356104188e-06, | |
| "loss": 0.3828, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 4.3544303797468356, | |
| "grad_norm": 0.2166065958360577, | |
| "learning_rate": 1.9039423131962365e-06, | |
| "loss": 0.3697, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 4.358481012658228, | |
| "grad_norm": 0.21790940853039328, | |
| "learning_rate": 1.8798456339429027e-06, | |
| "loss": 0.3716, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 4.362531645569621, | |
| "grad_norm": 0.20606713748473623, | |
| "learning_rate": 1.8558948919217612e-06, | |
| "loss": 0.3639, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 4.366582278481013, | |
| "grad_norm": 0.1947323325298249, | |
| "learning_rate": 1.8320902800287954e-06, | |
| "loss": 0.3743, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 4.370632911392405, | |
| "grad_norm": 0.19914611357063197, | |
| "learning_rate": 1.8084319899830726e-06, | |
| "loss": 0.3849, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 4.374683544303798, | |
| "grad_norm": 0.20817045308452728, | |
| "learning_rate": 1.7849202123252097e-06, | |
| "loss": 0.368, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 4.37873417721519, | |
| "grad_norm": 0.20178263149439804, | |
| "learning_rate": 1.7615551364158401e-06, | |
| "loss": 0.3704, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 4.382784810126582, | |
| "grad_norm": 0.19809386859714087, | |
| "learning_rate": 1.738336950434061e-06, | |
| "loss": 0.3655, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 4.386835443037975, | |
| "grad_norm": 0.2037435484156601, | |
| "learning_rate": 1.715265841375957e-06, | |
| "loss": 0.3651, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 4.390886075949367, | |
| "grad_norm": 0.2116806539312197, | |
| "learning_rate": 1.6923419950530684e-06, | |
| "loss": 0.3709, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 4.39493670886076, | |
| "grad_norm": 0.20006758641049305, | |
| "learning_rate": 1.6695655960909008e-06, | |
| "loss": 0.3855, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 4.398987341772152, | |
| "grad_norm": 0.2052280215216722, | |
| "learning_rate": 1.646936827927441e-06, | |
| "loss": 0.3592, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 4.403037974683544, | |
| "grad_norm": 0.19971320977576953, | |
| "learning_rate": 1.6244558728116766e-06, | |
| "loss": 0.3703, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 4.407088607594937, | |
| "grad_norm": 0.20483161955919033, | |
| "learning_rate": 1.6021229118021265e-06, | |
| "loss": 0.3569, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 4.411139240506329, | |
| "grad_norm": 0.20350198618111182, | |
| "learning_rate": 1.5799381247653967e-06, | |
| "loss": 0.3597, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 4.415189873417722, | |
| "grad_norm": 0.20251067852038496, | |
| "learning_rate": 1.5579016903747013e-06, | |
| "loss": 0.3782, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 4.419240506329114, | |
| "grad_norm": 0.20496444710265416, | |
| "learning_rate": 1.5360137861084656e-06, | |
| "loss": 0.3624, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 4.423291139240506, | |
| "grad_norm": 0.19899724491931184, | |
| "learning_rate": 1.5142745882488475e-06, | |
| "loss": 0.3771, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 4.427341772151899, | |
| "grad_norm": 0.20081948661757276, | |
| "learning_rate": 1.4926842718803691e-06, | |
| "loss": 0.3671, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 4.431392405063291, | |
| "grad_norm": 0.19799720295608475, | |
| "learning_rate": 1.4712430108884657e-06, | |
| "loss": 0.3631, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 4.435443037974683, | |
| "grad_norm": 0.20402443350828575, | |
| "learning_rate": 1.4499509779581078e-06, | |
| "loss": 0.369, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 4.439493670886076, | |
| "grad_norm": 0.20203686152029354, | |
| "learning_rate": 1.4288083445723988e-06, | |
| "loss": 0.385, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 4.443544303797468, | |
| "grad_norm": 0.19693128366630258, | |
| "learning_rate": 1.4078152810112045e-06, | |
| "loss": 0.3603, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 4.44759493670886, | |
| "grad_norm": 0.20235386080871376, | |
| "learning_rate": 1.3869719563497697e-06, | |
| "loss": 0.3781, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 4.451645569620253, | |
| "grad_norm": 0.195405500740495, | |
| "learning_rate": 1.3662785384573663e-06, | |
| "loss": 0.3629, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 4.455696202531645, | |
| "grad_norm": 0.19168854165298244, | |
| "learning_rate": 1.3457351939959383e-06, | |
| "loss": 0.3717, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 4.459746835443038, | |
| "grad_norm": 0.1989151166679931, | |
| "learning_rate": 1.3253420884187551e-06, | |
| "loss": 0.3636, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 4.46379746835443, | |
| "grad_norm": 0.2031461991236245, | |
| "learning_rate": 1.3050993859690953e-06, | |
| "loss": 0.3672, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 4.467848101265822, | |
| "grad_norm": 0.19301451873678271, | |
| "learning_rate": 1.2850072496788869e-06, | |
| "loss": 0.3685, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 4.471898734177215, | |
| "grad_norm": 0.19147885094725345, | |
| "learning_rate": 1.2650658413674434e-06, | |
| "loss": 0.3712, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 4.4759493670886075, | |
| "grad_norm": 0.19426906670979383, | |
| "learning_rate": 1.2452753216401226e-06, | |
| "loss": 0.3658, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "grad_norm": 0.19365285387095088, | |
| "learning_rate": 1.2256358498870503e-06, | |
| "loss": 0.3749, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 4.4840506329113925, | |
| "grad_norm": 0.19970098931469718, | |
| "learning_rate": 1.2061475842818337e-06, | |
| "loss": 0.3703, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 4.488101265822785, | |
| "grad_norm": 0.19665228000280865, | |
| "learning_rate": 1.1868106817802816e-06, | |
| "loss": 0.3736, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 4.4921518987341775, | |
| "grad_norm": 0.19549894514662816, | |
| "learning_rate": 1.1676252981191482e-06, | |
| "loss": 0.3638, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 4.49620253164557, | |
| "grad_norm": 0.19829355520960645, | |
| "learning_rate": 1.1485915878148823e-06, | |
| "loss": 0.3702, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 4.500253164556962, | |
| "grad_norm": 0.20024815716156083, | |
| "learning_rate": 1.1297097041623584e-06, | |
| "loss": 0.3657, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 4.504303797468355, | |
| "grad_norm": 0.19699001112848946, | |
| "learning_rate": 1.1109797992336847e-06, | |
| "loss": 0.3648, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 4.508354430379747, | |
| "grad_norm": 0.20185222140879966, | |
| "learning_rate": 1.092402023876933e-06, | |
| "loss": 0.3778, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 4.512405063291139, | |
| "grad_norm": 0.20579367848989755, | |
| "learning_rate": 1.0739765277149527e-06, | |
| "loss": 0.3699, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 4.516455696202532, | |
| "grad_norm": 0.20217953410887549, | |
| "learning_rate": 1.0557034591441596e-06, | |
| "loss": 0.3654, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 4.520506329113924, | |
| "grad_norm": 0.20025906754426578, | |
| "learning_rate": 1.0375829653333324e-06, | |
| "loss": 0.3684, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 4.524556962025317, | |
| "grad_norm": 0.2013741644452221, | |
| "learning_rate": 1.0196151922224385e-06, | |
| "loss": 0.3583, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 4.528607594936709, | |
| "grad_norm": 0.1931035924325039, | |
| "learning_rate": 1.0018002845214526e-06, | |
| "loss": 0.3647, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 4.532658227848101, | |
| "grad_norm": 0.2056383904410794, | |
| "learning_rate": 9.841383857091947e-07, | |
| "loss": 0.3652, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 4.536708860759494, | |
| "grad_norm": 0.2167731280067832, | |
| "learning_rate": 9.666296380321616e-07, | |
| "loss": 0.3514, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 4.540759493670886, | |
| "grad_norm": 0.18949598719854865, | |
| "learning_rate": 9.492741825034124e-07, | |
| "loss": 0.378, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 4.544810126582279, | |
| "grad_norm": 0.19729747504624592, | |
| "learning_rate": 9.320721589013892e-07, | |
| "loss": 0.3736, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 4.548860759493671, | |
| "grad_norm": 0.19773888349845925, | |
| "learning_rate": 9.150237057688339e-07, | |
| "loss": 0.362, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 4.552911392405063, | |
| "grad_norm": 0.19775726765053958, | |
| "learning_rate": 8.981289604116328e-07, | |
| "loss": 0.3684, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 4.556962025316456, | |
| "grad_norm": 0.19450990915648061, | |
| "learning_rate": 8.813880588977542e-07, | |
| "loss": 0.3778, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 4.561012658227848, | |
| "grad_norm": 0.19979736708473808, | |
| "learning_rate": 8.648011360561126e-07, | |
| "loss": 0.3561, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 4.56506329113924, | |
| "grad_norm": 0.2002945429319717, | |
| "learning_rate": 8.483683254755037e-07, | |
| "loss": 0.3541, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 4.569113924050633, | |
| "grad_norm": 0.2009398397073559, | |
| "learning_rate": 8.320897595035227e-07, | |
| "loss": 0.3663, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 4.573164556962025, | |
| "grad_norm": 0.19938474291938896, | |
| "learning_rate": 8.159655692455093e-07, | |
| "loss": 0.3564, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 4.577215189873417, | |
| "grad_norm": 0.2053831500990717, | |
| "learning_rate": 7.999958845634648e-07, | |
| "loss": 0.3689, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 4.58126582278481, | |
| "grad_norm": 0.2002666205430569, | |
| "learning_rate": 7.841808340750478e-07, | |
| "loss": 0.3908, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 4.585316455696202, | |
| "grad_norm": 0.19722575218962698, | |
| "learning_rate": 7.685205451524869e-07, | |
| "loss": 0.3801, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 4.589367088607595, | |
| "grad_norm": 0.1982693395304635, | |
| "learning_rate": 7.530151439216027e-07, | |
| "loss": 0.3732, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 4.593417721518987, | |
| "grad_norm": 0.1970493793762997, | |
| "learning_rate": 7.376647552607675e-07, | |
| "loss": 0.3705, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 4.597468354430379, | |
| "grad_norm": 0.1977876043852225, | |
| "learning_rate": 7.224695027998963e-07, | |
| "loss": 0.3713, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 4.601518987341772, | |
| "grad_norm": 0.19782936150519176, | |
| "learning_rate": 7.07429508919466e-07, | |
| "loss": 0.3652, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 4.605569620253164, | |
| "grad_norm": 0.19172441693328024, | |
| "learning_rate": 6.925448947495206e-07, | |
| "loss": 0.3741, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 4.609620253164557, | |
| "grad_norm": 0.1902556406650782, | |
| "learning_rate": 6.778157801686936e-07, | |
| "loss": 0.3807, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 4.613670886075949, | |
| "grad_norm": 0.19361001895133612, | |
| "learning_rate": 6.632422838032515e-07, | |
| "loss": 0.3702, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 4.6177215189873415, | |
| "grad_norm": 0.20171357861933498, | |
| "learning_rate": 6.488245230261281e-07, | |
| "loss": 0.3623, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 4.6217721518987345, | |
| "grad_norm": 0.19450110326742778, | |
| "learning_rate": 6.345626139559868e-07, | |
| "loss": 0.365, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 4.6258227848101265, | |
| "grad_norm": 0.19279589447192502, | |
| "learning_rate": 6.204566714562866e-07, | |
| "loss": 0.3657, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 4.629873417721519, | |
| "grad_norm": 0.1891792082212798, | |
| "learning_rate": 6.06506809134344e-07, | |
| "loss": 0.3744, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 4.633924050632912, | |
| "grad_norm": 0.20770590369979192, | |
| "learning_rate": 5.927131393404373e-07, | |
| "loss": 0.3669, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 4.637974683544304, | |
| "grad_norm": 0.1973042212462415, | |
| "learning_rate": 5.790757731668817e-07, | |
| "loss": 0.3782, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 4.642025316455696, | |
| "grad_norm": 0.19665404411387785, | |
| "learning_rate": 5.655948204471507e-07, | |
| "loss": 0.3777, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 4.646075949367089, | |
| "grad_norm": 0.19861713550745552, | |
| "learning_rate": 5.522703897549875e-07, | |
| "loss": 0.3898, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 4.650126582278481, | |
| "grad_norm": 0.1982323502976002, | |
| "learning_rate": 5.391025884035239e-07, | |
| "loss": 0.3731, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 4.654177215189874, | |
| "grad_norm": 0.19478016162515197, | |
| "learning_rate": 5.260915224444207e-07, | |
| "loss": 0.389, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 4.658227848101266, | |
| "grad_norm": 0.19719066950952938, | |
| "learning_rate": 5.132372966670129e-07, | |
| "loss": 0.3687, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 4.662278481012658, | |
| "grad_norm": 0.20381126769759292, | |
| "learning_rate": 5.005400145974704e-07, | |
| "loss": 0.3516, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 4.666329113924051, | |
| "grad_norm": 0.21065490548614954, | |
| "learning_rate": 4.879997784979562e-07, | |
| "loss": 0.3621, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 4.670379746835443, | |
| "grad_norm": 0.1909279592487378, | |
| "learning_rate": 4.7561668936580984e-07, | |
| "loss": 0.3789, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 4.674430379746836, | |
| "grad_norm": 0.19744198060802876, | |
| "learning_rate": 4.6339084693272306e-07, | |
| "loss": 0.3807, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 4.678481012658228, | |
| "grad_norm": 0.20286663873428457, | |
| "learning_rate": 4.5132234966395847e-07, | |
| "loss": 0.3624, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 4.68253164556962, | |
| "grad_norm": 0.2038201677001349, | |
| "learning_rate": 4.3941129475752795e-07, | |
| "loss": 0.3633, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 4.686582278481013, | |
| "grad_norm": 0.19378805976242464, | |
| "learning_rate": 4.27657778143431e-07, | |
| "loss": 0.3717, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 4.690632911392405, | |
| "grad_norm": 0.19836069228993353, | |
| "learning_rate": 4.1606189448287757e-07, | |
| "loss": 0.3696, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 4.694683544303797, | |
| "grad_norm": 0.2003501897772341, | |
| "learning_rate": 4.046237371675177e-07, | |
| "loss": 0.3693, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 4.69873417721519, | |
| "grad_norm": 0.2002909191004781, | |
| "learning_rate": 3.9334339831869963e-07, | |
| "loss": 0.3696, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 4.702784810126582, | |
| "grad_norm": 0.19302104583763816, | |
| "learning_rate": 3.8222096878671955e-07, | |
| "loss": 0.371, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 4.706835443037974, | |
| "grad_norm": 0.19936465127981864, | |
| "learning_rate": 3.7125653815009545e-07, | |
| "loss": 0.3711, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 4.710886075949367, | |
| "grad_norm": 0.19608820295892057, | |
| "learning_rate": 3.6045019471484974e-07, | |
| "loss": 0.3763, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 4.714936708860759, | |
| "grad_norm": 0.19528544524784472, | |
| "learning_rate": 3.498020255137813e-07, | |
| "loss": 0.3759, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 4.718987341772152, | |
| "grad_norm": 0.19027108814032834, | |
| "learning_rate": 3.393121163057811e-07, | |
| "loss": 0.3787, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 4.723037974683544, | |
| "grad_norm": 0.190751056390856, | |
| "learning_rate": 3.289805515751399e-07, | |
| "loss": 0.3789, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 4.727088607594936, | |
| "grad_norm": 0.19018446397745253, | |
| "learning_rate": 3.188074145308573e-07, | |
| "loss": 0.3704, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 4.731139240506329, | |
| "grad_norm": 0.19280650352498363, | |
| "learning_rate": 3.087927871059804e-07, | |
| "loss": 0.3755, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 4.735189873417721, | |
| "grad_norm": 0.1993488817814719, | |
| "learning_rate": 2.989367499569418e-07, | |
| "loss": 0.3723, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 4.739240506329114, | |
| "grad_norm": 0.1964693866269329, | |
| "learning_rate": 2.8923938246290917e-07, | |
| "loss": 0.3641, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 4.743291139240506, | |
| "grad_norm": 0.19814854748267932, | |
| "learning_rate": 2.7970076272514804e-07, | |
| "loss": 0.362, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 4.7473417721518985, | |
| "grad_norm": 0.1872480286124407, | |
| "learning_rate": 2.703209675663887e-07, | |
| "loss": 0.383, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 4.751392405063291, | |
| "grad_norm": 0.19567977488750696, | |
| "learning_rate": 2.6110007253021374e-07, | |
| "loss": 0.3668, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 4.7554430379746835, | |
| "grad_norm": 0.19785047840077458, | |
| "learning_rate": 2.520381518804471e-07, | |
| "loss": 0.3754, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 4.759493670886076, | |
| "grad_norm": 0.19390834188199224, | |
| "learning_rate": 2.4313527860054585e-07, | |
| "loss": 0.3688, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 4.7635443037974685, | |
| "grad_norm": 0.18736341473743742, | |
| "learning_rate": 2.343915243930317e-07, | |
| "loss": 0.3692, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 4.767594936708861, | |
| "grad_norm": 0.19656104506659142, | |
| "learning_rate": 2.2580695967889367e-07, | |
| "loss": 0.3687, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 4.7716455696202535, | |
| "grad_norm": 0.19271883447822538, | |
| "learning_rate": 2.1738165359704189e-07, | |
| "loss": 0.3712, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 4.775696202531646, | |
| "grad_norm": 0.1973038938289027, | |
| "learning_rate": 2.0911567400373257e-07, | |
| "loss": 0.3685, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 4.779746835443038, | |
| "grad_norm": 0.19463660747370465, | |
| "learning_rate": 2.0100908747202607e-07, | |
| "loss": 0.3703, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 4.783797468354431, | |
| "grad_norm": 0.19456683151812587, | |
| "learning_rate": 1.9306195929125638e-07, | |
| "loss": 0.3704, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 4.787848101265823, | |
| "grad_norm": 0.19431278369990157, | |
| "learning_rate": 1.8527435346650247e-07, | |
| "loss": 0.3647, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 4.791898734177215, | |
| "grad_norm": 0.19307066710229734, | |
| "learning_rate": 1.7764633271807108e-07, | |
| "loss": 0.3648, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 4.795949367088608, | |
| "grad_norm": 0.18947942350530061, | |
| "learning_rate": 1.7017795848099262e-07, | |
| "loss": 0.3676, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "grad_norm": 0.20041649621852745, | |
| "learning_rate": 1.6286929090452596e-07, | |
| "loss": 0.3589, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 4.804050632911393, | |
| "grad_norm": 0.20017240714629844, | |
| "learning_rate": 1.557203888516745e-07, | |
| "loss": 0.3578, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 4.808101265822785, | |
| "grad_norm": 0.19570389918229877, | |
| "learning_rate": 1.487313098987131e-07, | |
| "loss": 0.3685, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 4.812151898734177, | |
| "grad_norm": 0.19082862238865853, | |
| "learning_rate": 1.4190211033472402e-07, | |
| "loss": 0.384, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 4.81620253164557, | |
| "grad_norm": 0.18679813542766394, | |
| "learning_rate": 1.3523284516113955e-07, | |
| "loss": 0.3783, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 4.820253164556962, | |
| "grad_norm": 0.19661130566995297, | |
| "learning_rate": 1.2872356809130682e-07, | |
| "loss": 0.3752, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 4.824303797468355, | |
| "grad_norm": 0.19391957230969104, | |
| "learning_rate": 1.2237433155004807e-07, | |
| "loss": 0.3706, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 4.828354430379747, | |
| "grad_norm": 0.20058116435785212, | |
| "learning_rate": 1.1618518667323886e-07, | |
| "loss": 0.3675, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 4.832405063291139, | |
| "grad_norm": 0.19198612751526883, | |
| "learning_rate": 1.1015618330740385e-07, | |
| "loss": 0.3743, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 4.836455696202532, | |
| "grad_norm": 0.19297074492098343, | |
| "learning_rate": 1.042873700093061e-07, | |
| "loss": 0.3841, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 4.840506329113924, | |
| "grad_norm": 0.19608792911552783, | |
| "learning_rate": 9.857879404556291e-08, | |
| "loss": 0.3756, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 4.844556962025316, | |
| "grad_norm": 0.19101916300205224, | |
| "learning_rate": 9.303050139225722e-08, | |
| "loss": 0.3729, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 4.848607594936709, | |
| "grad_norm": 0.1949415869902579, | |
| "learning_rate": 8.76425367345779e-08, | |
| "loss": 0.3756, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 4.852658227848101, | |
| "grad_norm": 0.19249833424262044, | |
| "learning_rate": 8.241494346644897e-08, | |
| "loss": 0.37, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 4.856708860759493, | |
| "grad_norm": 0.23286209050406617, | |
| "learning_rate": 7.734776369019204e-08, | |
| "loss": 0.3698, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 4.860759493670886, | |
| "grad_norm": 0.19340348074693778, | |
| "learning_rate": 7.244103821617332e-08, | |
| "loss": 0.3622, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 4.864810126582278, | |
| "grad_norm": 0.1909225852090869, | |
| "learning_rate": 6.769480656248606e-08, | |
| "loss": 0.3764, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 4.868860759493671, | |
| "grad_norm": 0.19385442214560197, | |
| "learning_rate": 6.310910695462635e-08, | |
| "loss": 0.3781, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 4.872911392405063, | |
| "grad_norm": 0.1911492486362271, | |
| "learning_rate": 5.8683976325191185e-08, | |
| "loss": 0.3604, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 4.876962025316455, | |
| "grad_norm": 0.18974172573682738, | |
| "learning_rate": 5.4419450313571984e-08, | |
| "loss": 0.3754, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 4.881012658227848, | |
| "grad_norm": 0.19609372990000615, | |
| "learning_rate": 5.031556326567488e-08, | |
| "loss": 0.3737, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 4.88506329113924, | |
| "grad_norm": 0.20612469170085448, | |
| "learning_rate": 4.637234823364312e-08, | |
| "loss": 0.3804, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 4.889113924050633, | |
| "grad_norm": 0.18577331687414014, | |
| "learning_rate": 4.258983697558838e-08, | |
| "loss": 0.3728, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 4.8931645569620255, | |
| "grad_norm": 0.19330057506595444, | |
| "learning_rate": 3.896805995533548e-08, | |
| "loss": 0.3681, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 4.8972151898734175, | |
| "grad_norm": 0.1980700084393809, | |
| "learning_rate": 3.550704634218028e-08, | |
| "loss": 0.3682, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 4.9012658227848105, | |
| "grad_norm": 0.19784576796836684, | |
| "learning_rate": 3.2206824010647676e-08, | |
| "loss": 0.355, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 4.905316455696203, | |
| "grad_norm": 0.1956268676122362, | |
| "learning_rate": 2.9067419540278476e-08, | |
| "loss": 0.3561, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 4.909367088607595, | |
| "grad_norm": 0.19159434473455017, | |
| "learning_rate": 2.6088858215400638e-08, | |
| "loss": 0.3729, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 4.913417721518988, | |
| "grad_norm": 0.19780025649020805, | |
| "learning_rate": 2.3271164024940564e-08, | |
| "loss": 0.377, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 4.91746835443038, | |
| "grad_norm": 0.19339086733272057, | |
| "learning_rate": 2.061435966221881e-08, | |
| "loss": 0.3716, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 4.921518987341772, | |
| "grad_norm": 0.1907790419174612, | |
| "learning_rate": 1.811846652477245e-08, | |
| "loss": 0.3861, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 4.925569620253165, | |
| "grad_norm": 0.19657974787570867, | |
| "learning_rate": 1.5783504714184106e-08, | |
| "loss": 0.3749, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 4.929620253164557, | |
| "grad_norm": 0.1964104046217562, | |
| "learning_rate": 1.360949303591097e-08, | |
| "loss": 0.376, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 4.93367088607595, | |
| "grad_norm": 0.19755700041732416, | |
| "learning_rate": 1.1596448999144916e-08, | |
| "loss": 0.3558, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 4.937721518987342, | |
| "grad_norm": 0.1917928696270574, | |
| "learning_rate": 9.744388816668172e-09, | |
| "loss": 0.3731, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 4.941772151898734, | |
| "grad_norm": 0.18988638296207275, | |
| "learning_rate": 8.05332740472009e-09, | |
| "loss": 0.3724, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 4.945822784810127, | |
| "grad_norm": 0.20178301101296467, | |
| "learning_rate": 6.523278382872811e-09, | |
| "loss": 0.3537, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 4.949873417721519, | |
| "grad_norm": 0.1992005662267341, | |
| "learning_rate": 5.15425407393133e-09, | |
| "loss": 0.3816, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 4.953924050632912, | |
| "grad_norm": 0.19050865077285728, | |
| "learning_rate": 3.94626550383137e-09, | |
| "loss": 0.3742, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 4.957974683544304, | |
| "grad_norm": 0.1872926460109918, | |
| "learning_rate": 2.899322401546112e-09, | |
| "loss": 0.3895, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 4.962025316455696, | |
| "grad_norm": 0.20099451486761116, | |
| "learning_rate": 2.013433199010706e-09, | |
| "loss": 0.3608, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 4.966075949367089, | |
| "grad_norm": 0.19890470296681306, | |
| "learning_rate": 1.2886050310556563e-09, | |
| "loss": 0.3743, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 4.970126582278481, | |
| "grad_norm": 0.1966593391401625, | |
| "learning_rate": 7.248437353468695e-10, | |
| "loss": 0.3623, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 4.974177215189873, | |
| "grad_norm": 0.1991880070762382, | |
| "learning_rate": 3.221538523412449e-10, | |
| "loss": 0.3808, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 4.978227848101266, | |
| "grad_norm": 0.19001535140018364, | |
| "learning_rate": 8.053862524670663e-11, | |
| "loss": 0.375, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 4.982278481012658, | |
| "grad_norm": 0.18580033503179685, | |
| "learning_rate": 0.0, | |
| "loss": 0.3718, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 4.982278481012658, | |
| "step": 1230, | |
| "total_flos": 5.917335126752231e+18, | |
| "train_loss": 0.49487486435630457, | |
| "train_runtime": 63553.2393, | |
| "train_samples_per_second": 2.486, | |
| "train_steps_per_second": 0.019 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1230, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.917335126752231e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
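The state above is the standard `trainer_state.json` that the Hugging Face `Trainer` writes at the end of a run. As a minimal sketch of how a file like this can be inspected after training (the file path, and matplotlib for the optional plot, are assumptions, not part of the log itself):

```python
import json

# Assumed path: wherever this trainer_state.json was saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Every per-step record carries a "loss" key; the final summary record
# (train_loss, train_runtime, ...) does not, so this filter drops it.
# With logging_steps = 1 there is one record per optimizer step.
records = [r for r in state["log_history"] if "loss" in r]
steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

print(f"{len(records)} logged steps, final loss {losses[-1]:.4f}")

# The last log_history entry is the aggregate summary; its throughput
# figures are internally consistent: max_steps / train_runtime matches
# the logged train_steps_per_second.
summary = state["log_history"][-1]
print("steps/sec:", state["max_steps"] / summary["train_runtime"])

# Optional: plot the loss and learning-rate curves side by side.
try:
    import matplotlib.pyplot as plt

    fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
    ax1.plot(steps, losses)
    ax1.set_ylabel("loss")
    ax2.plot(steps, lrs)
    ax2.set_ylabel("learning rate")
    ax2.set_xlabel("step")
    fig.savefig("training_curves.png")
except ImportError:
    pass  # plotting is optional; the printed summary still runs
```

Run against this state, the sanity check reproduces the logged throughput: 1230 steps over a train_runtime of 63553.2393 s gives roughly 0.019 steps per second, matching train_steps_per_second above.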