| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9725490196078432, |
| "eval_steps": 500, |
| "global_step": 126, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.023529411764705882, |
| "grad_norm": 6.901778221130371, |
| "learning_rate": 7.692307692307694e-07, |
| "loss": 1.1119, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.047058823529411764, |
| "grad_norm": 7.4228010177612305, |
| "learning_rate": 1.5384615384615387e-06, |
| "loss": 1.1537, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.07058823529411765, |
| "grad_norm": 6.921288967132568, |
| "learning_rate": 2.307692307692308e-06, |
| "loss": 1.1207, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.09411764705882353, |
| "grad_norm": 6.287109851837158, |
| "learning_rate": 3.0769230769230774e-06, |
| "loss": 1.0477, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.11764705882352941, |
| "grad_norm": 5.439420700073242, |
| "learning_rate": 3.846153846153847e-06, |
| "loss": 1.0753, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.1411764705882353, |
| "grad_norm": 3.6856906414031982, |
| "learning_rate": 4.615384615384616e-06, |
| "loss": 0.9761, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.16470588235294117, |
| "grad_norm": 2.859872341156006, |
| "learning_rate": 5.384615384615385e-06, |
| "loss": 0.958, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.18823529411764706, |
| "grad_norm": 4.026933670043945, |
| "learning_rate": 6.153846153846155e-06, |
| "loss": 1.0194, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.21176470588235294, |
| "grad_norm": 4.821041107177734, |
| "learning_rate": 6.923076923076923e-06, |
| "loss": 0.9508, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.23529411764705882, |
| "grad_norm": 4.604616641998291, |
| "learning_rate": 7.692307692307694e-06, |
| "loss": 1.0138, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.25882352941176473, |
| "grad_norm": 3.8580994606018066, |
| "learning_rate": 8.461538461538462e-06, |
| "loss": 0.9089, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.2823529411764706, |
| "grad_norm": 3.7659692764282227, |
| "learning_rate": 9.230769230769232e-06, |
| "loss": 1.0376, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.3058823529411765, |
| "grad_norm": 2.5426435470581055, |
| "learning_rate": 1e-05, |
| "loss": 0.9651, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.32941176470588235, |
| "grad_norm": 2.092756986618042, |
| "learning_rate": 9.998067787472772e-06, |
| "loss": 0.9098, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.35294117647058826, |
| "grad_norm": 2.1536059379577637, |
| "learning_rate": 9.992272643269181e-06, |
| "loss": 0.8308, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.3764705882352941, |
| "grad_norm": 1.9728121757507324, |
| "learning_rate": 9.982619046369321e-06, |
| "loss": 0.9148, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 1.6017658710479736, |
| "learning_rate": 9.96911445789354e-06, |
| "loss": 0.8949, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.4235294117647059, |
| "grad_norm": 1.4864757061004639, |
| "learning_rate": 9.951769315335843e-06, |
| "loss": 0.8593, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.4470588235294118, |
| "grad_norm": 1.447627305984497, |
| "learning_rate": 9.930597024496933e-06, |
| "loss": 0.8316, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.47058823529411764, |
| "grad_norm": 1.1549137830734253, |
| "learning_rate": 9.905613949123036e-06, |
| "loss": 0.8079, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.49411764705882355, |
| "grad_norm": 1.2319858074188232, |
| "learning_rate": 9.87683939825864e-06, |
| "loss": 0.8833, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.5176470588235295, |
| "grad_norm": 1.2558043003082275, |
| "learning_rate": 9.844295611322804e-06, |
| "loss": 0.8729, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.5411764705882353, |
| "grad_norm": 1.0202245712280273, |
| "learning_rate": 9.808007740920647e-06, |
| "loss": 0.7801, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.5647058823529412, |
| "grad_norm": 0.9334889054298401, |
| "learning_rate": 9.768003833403278e-06, |
| "loss": 0.8134, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.5882352941176471, |
| "grad_norm": 1.1106406450271606, |
| "learning_rate": 9.724314807191197e-06, |
| "loss": 0.8359, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.611764705882353, |
| "grad_norm": 0.9234170317649841, |
| "learning_rate": 9.6769744288779e-06, |
| "loss": 0.8229, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.6352941176470588, |
| "grad_norm": 0.7843503355979919, |
| "learning_rate": 9.626019287132202e-06, |
| "loss": 0.7927, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.6588235294117647, |
| "grad_norm": 0.8040333390235901, |
| "learning_rate": 9.571488764419381e-06, |
| "loss": 0.8129, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.6823529411764706, |
| "grad_norm": 0.7696279287338257, |
| "learning_rate": 9.51342500656308e-06, |
| "loss": 0.8572, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.7058823529411765, |
| "grad_norm": 0.6732362508773804, |
| "learning_rate": 9.451872890171419e-06, |
| "loss": 0.8103, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.7294117647058823, |
| "grad_norm": 0.6914750337600708, |
| "learning_rate": 9.386879987952549e-06, |
| "loss": 0.8969, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.7529411764705882, |
| "grad_norm": 0.6257173418998718, |
| "learning_rate": 9.318496531946411e-06, |
| "loss": 0.818, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.7764705882352941, |
| "grad_norm": 0.7246304750442505, |
| "learning_rate": 9.246775374701139e-06, |
| "loss": 0.8332, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.7830196022987366, |
| "learning_rate": 9.171771948424138e-06, |
| "loss": 0.8585, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.8235294117647058, |
| "grad_norm": 0.653729259967804, |
| "learning_rate": 9.093544222139338e-06, |
| "loss": 0.8725, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.8470588235294118, |
| "grad_norm": 0.742987871170044, |
| "learning_rate": 9.012152656883824e-06, |
| "loss": 0.785, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.8705882352941177, |
| "grad_norm": 0.8727246522903442, |
| "learning_rate": 8.927660158978392e-06, |
| "loss": 0.8348, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.8941176470588236, |
| "grad_norm": 0.6196137070655823, |
| "learning_rate": 8.84013203140821e-06, |
| "loss": 0.8418, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.9176470588235294, |
| "grad_norm": 0.610687255859375, |
| "learning_rate": 8.749635923351108e-06, |
| "loss": 0.776, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.9411764705882353, |
| "grad_norm": 0.6278626322746277, |
| "learning_rate": 8.656241777892544e-06, |
| "loss": 0.7207, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.9647058823529412, |
| "grad_norm": 0.5417529344558716, |
| "learning_rate": 8.56002177796765e-06, |
| "loss": 0.7694, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.9882352941176471, |
| "grad_norm": 0.5784769654273987, |
| "learning_rate": 8.461050290572114e-06, |
| "loss": 0.7733, |
| "step": 42 |
| }, |
| { |
| "epoch": 1.0156862745098039, |
| "grad_norm": 1.1008639335632324, |
| "learning_rate": 8.359403809285054e-06, |
| "loss": 1.312, |
| "step": 43 |
| }, |
| { |
| "epoch": 1.0392156862745099, |
| "grad_norm": 0.5411049723625183, |
| "learning_rate": 8.255160895148263e-06, |
| "loss": 0.7666, |
| "step": 44 |
| }, |
| { |
| "epoch": 1.0627450980392157, |
| "grad_norm": 0.602947473526001, |
| "learning_rate": 8.14840211594757e-06, |
| "loss": 0.7368, |
| "step": 45 |
| }, |
| { |
| "epoch": 1.0862745098039215, |
| "grad_norm": 0.6235800385475159, |
| "learning_rate": 8.039209983943201e-06, |
| "loss": 0.7976, |
| "step": 46 |
| }, |
| { |
| "epoch": 1.1098039215686275, |
| "grad_norm": 0.569098174571991, |
| "learning_rate": 7.927668892097288e-06, |
| "loss": 0.7109, |
| "step": 47 |
| }, |
| { |
| "epoch": 1.1333333333333333, |
| "grad_norm": 0.5677081942558289, |
| "learning_rate": 7.81386504884782e-06, |
| "loss": 0.738, |
| "step": 48 |
| }, |
| { |
| "epoch": 1.156862745098039, |
| "grad_norm": 0.7025531530380249, |
| "learning_rate": 7.697886411479422e-06, |
| "loss": 0.8267, |
| "step": 49 |
| }, |
| { |
| "epoch": 1.1803921568627451, |
| "grad_norm": 0.5158477425575256, |
| "learning_rate": 7.579822618142505e-06, |
| "loss": 0.7993, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.203921568627451, |
| "grad_norm": 0.7206972241401672, |
| "learning_rate": 7.459764918573264e-06, |
| "loss": 0.8324, |
| "step": 51 |
| }, |
| { |
| "epoch": 1.227450980392157, |
| "grad_norm": 0.5990767478942871, |
| "learning_rate": 7.3378061035681415e-06, |
| "loss": 0.73, |
| "step": 52 |
| }, |
| { |
| "epoch": 1.2509803921568627, |
| "grad_norm": 0.5361316204071045, |
| "learning_rate": 7.2140404332671986e-06, |
| "loss": 0.7399, |
| "step": 53 |
| }, |
| { |
| "epoch": 1.2745098039215685, |
| "grad_norm": 0.6394158601760864, |
| "learning_rate": 7.088563564301874e-06, |
| "loss": 0.8289, |
| "step": 54 |
| }, |
| { |
| "epoch": 1.2980392156862746, |
| "grad_norm": 0.5778906345367432, |
| "learning_rate": 6.961472475863406e-06, |
| "loss": 0.7455, |
| "step": 55 |
| }, |
| { |
| "epoch": 1.3215686274509804, |
| "grad_norm": 0.453545480966568, |
| "learning_rate": 6.832865394749065e-06, |
| "loss": 0.6694, |
| "step": 56 |
| }, |
| { |
| "epoch": 1.3450980392156864, |
| "grad_norm": 0.5790985226631165, |
| "learning_rate": 6.702841719444141e-06, |
| "loss": 0.8093, |
| "step": 57 |
| }, |
| { |
| "epoch": 1.3686274509803922, |
| "grad_norm": 0.48918935656547546, |
| "learning_rate": 6.571501943298335e-06, |
| "loss": 0.7096, |
| "step": 58 |
| }, |
| { |
| "epoch": 1.392156862745098, |
| "grad_norm": 0.6267134547233582, |
| "learning_rate": 6.4389475768559675e-06, |
| "loss": 0.814, |
| "step": 59 |
| }, |
| { |
| "epoch": 1.415686274509804, |
| "grad_norm": 0.4953418970108032, |
| "learning_rate": 6.305281069399989e-06, |
| "loss": 0.6618, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.4392156862745098, |
| "grad_norm": 0.5042392611503601, |
| "learning_rate": 6.17060572977047e-06, |
| "loss": 0.6929, |
| "step": 61 |
| }, |
| { |
| "epoch": 1.4627450980392158, |
| "grad_norm": 0.5538609027862549, |
| "learning_rate": 6.035025646518747e-06, |
| "loss": 0.7561, |
| "step": 62 |
| }, |
| { |
| "epoch": 1.4862745098039216, |
| "grad_norm": 0.6071920394897461, |
| "learning_rate": 5.898645607458941e-06, |
| "loss": 0.7797, |
| "step": 63 |
| }, |
| { |
| "epoch": 1.5098039215686274, |
| "grad_norm": 0.5658022165298462, |
| "learning_rate": 5.761571018679025e-06, |
| "loss": 0.7374, |
| "step": 64 |
| }, |
| { |
| "epoch": 1.5333333333333332, |
| "grad_norm": 0.5699365139007568, |
| "learning_rate": 5.623907823074044e-06, |
| "loss": 0.8136, |
| "step": 65 |
| }, |
| { |
| "epoch": 1.5568627450980392, |
| "grad_norm": 0.47883421182632446, |
| "learning_rate": 5.48576241846443e-06, |
| "loss": 0.7933, |
| "step": 66 |
| }, |
| { |
| "epoch": 1.5803921568627453, |
| "grad_norm": 0.476575642824173, |
| "learning_rate": 5.347241575362729e-06, |
| "loss": 0.7209, |
| "step": 67 |
| }, |
| { |
| "epoch": 1.603921568627451, |
| "grad_norm": 0.5210950970649719, |
| "learning_rate": 5.208452354452275e-06, |
| "loss": 0.7747, |
| "step": 68 |
| }, |
| { |
| "epoch": 1.6274509803921569, |
| "grad_norm": 0.47601795196533203, |
| "learning_rate": 5.069502023841576e-06, |
| "loss": 0.7635, |
| "step": 69 |
| }, |
| { |
| "epoch": 1.6509803921568627, |
| "grad_norm": 0.521950364112854, |
| "learning_rate": 4.9304979761584256e-06, |
| "loss": 0.7707, |
| "step": 70 |
| }, |
| { |
| "epoch": 1.6745098039215687, |
| "grad_norm": 0.4452243745326996, |
| "learning_rate": 4.791547645547727e-06, |
| "loss": 0.6827, |
| "step": 71 |
| }, |
| { |
| "epoch": 1.6980392156862745, |
| "grad_norm": 0.5476846098899841, |
| "learning_rate": 4.652758424637271e-06, |
| "loss": 0.7939, |
| "step": 72 |
| }, |
| { |
| "epoch": 1.7215686274509805, |
| "grad_norm": 0.480186402797699, |
| "learning_rate": 4.514237581535571e-06, |
| "loss": 0.7367, |
| "step": 73 |
| }, |
| { |
| "epoch": 1.7450980392156863, |
| "grad_norm": 0.5076435804367065, |
| "learning_rate": 4.3760921769259585e-06, |
| "loss": 0.6935, |
| "step": 74 |
| }, |
| { |
| "epoch": 1.768627450980392, |
| "grad_norm": 0.5008230805397034, |
| "learning_rate": 4.2384289813209754e-06, |
| "loss": 0.7474, |
| "step": 75 |
| }, |
| { |
| "epoch": 1.792156862745098, |
| "grad_norm": 0.44636473059654236, |
| "learning_rate": 4.101354392541061e-06, |
| "loss": 0.7357, |
| "step": 76 |
| }, |
| { |
| "epoch": 1.815686274509804, |
| "grad_norm": 0.4537220597267151, |
| "learning_rate": 3.964974353481254e-06, |
| "loss": 0.7329, |
| "step": 77 |
| }, |
| { |
| "epoch": 1.83921568627451, |
| "grad_norm": 0.4459396302700043, |
| "learning_rate": 3.829394270229531e-06, |
| "loss": 0.7294, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.8627450980392157, |
| "grad_norm": 0.3966139256954193, |
| "learning_rate": 3.694718930600012e-06, |
| "loss": 0.6419, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.8862745098039215, |
| "grad_norm": 0.4331508278846741, |
| "learning_rate": 3.5610524231440324e-06, |
| "loss": 0.789, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.9098039215686273, |
| "grad_norm": 0.44077518582344055, |
| "learning_rate": 3.428498056701665e-06, |
| "loss": 0.7499, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.9333333333333333, |
| "grad_norm": 0.428218275308609, |
| "learning_rate": 3.2971582805558622e-06, |
| "loss": 0.7664, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.9568627450980394, |
| "grad_norm": 0.45457276701927185, |
| "learning_rate": 3.167134605250938e-06, |
| "loss": 0.7651, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.9803921568627452, |
| "grad_norm": 0.4581441283226013, |
| "learning_rate": 3.0385275241365965e-06, |
| "loss": 0.771, |
| "step": 84 |
| }, |
| { |
| "epoch": 2.007843137254902, |
| "grad_norm": 0.8593717813491821, |
| "learning_rate": 2.9114364356981274e-06, |
| "loss": 1.2373, |
| "step": 85 |
| }, |
| { |
| "epoch": 2.0313725490196077, |
| "grad_norm": 0.38274502754211426, |
| "learning_rate": 2.7859595667328027e-06, |
| "loss": 0.7255, |
| "step": 86 |
| }, |
| { |
| "epoch": 2.0549019607843135, |
| "grad_norm": 0.4255603551864624, |
| "learning_rate": 2.6621938964318593e-06, |
| "loss": 0.6407, |
| "step": 87 |
| }, |
| { |
| "epoch": 2.0784313725490198, |
| "grad_norm": 0.4372337758541107, |
| "learning_rate": 2.5402350814267364e-06, |
| "loss": 0.6874, |
| "step": 88 |
| }, |
| { |
| "epoch": 2.1019607843137256, |
| "grad_norm": 0.4747924506664276, |
| "learning_rate": 2.4201773818574956e-06, |
| "loss": 0.654, |
| "step": 89 |
| }, |
| { |
| "epoch": 2.1254901960784314, |
| "grad_norm": 0.45802468061447144, |
| "learning_rate": 2.302113588520578e-06, |
| "loss": 0.6809, |
| "step": 90 |
| }, |
| { |
| "epoch": 2.149019607843137, |
| "grad_norm": 0.39464399218559265, |
| "learning_rate": 2.1861349511521817e-06, |
| "loss": 0.6086, |
| "step": 91 |
| }, |
| { |
| "epoch": 2.172549019607843, |
| "grad_norm": 0.4720733165740967, |
| "learning_rate": 2.072331107902713e-06, |
| "loss": 0.9134, |
| "step": 92 |
| }, |
| { |
| "epoch": 2.196078431372549, |
| "grad_norm": 0.4298717677593231, |
| "learning_rate": 1.960790016056801e-06, |
| "loss": 0.6735, |
| "step": 93 |
| }, |
| { |
| "epoch": 2.219607843137255, |
| "grad_norm": 0.4402537941932678, |
| "learning_rate": 1.8515978840524302e-06, |
| "loss": 0.6972, |
| "step": 94 |
| }, |
| { |
| "epoch": 2.243137254901961, |
| "grad_norm": 0.45702147483825684, |
| "learning_rate": 1.7448391048517378e-06, |
| "loss": 0.7225, |
| "step": 95 |
| }, |
| { |
| "epoch": 2.2666666666666666, |
| "grad_norm": 0.4096600413322449, |
| "learning_rate": 1.640596190714947e-06, |
| "loss": 0.7225, |
| "step": 96 |
| }, |
| { |
| "epoch": 2.2901960784313724, |
| "grad_norm": 0.43652820587158203, |
| "learning_rate": 1.5389497094278861e-06, |
| "loss": 0.7208, |
| "step": 97 |
| }, |
| { |
| "epoch": 2.313725490196078, |
| "grad_norm": 0.44025924801826477, |
| "learning_rate": 1.4399782220323515e-06, |
| "loss": 0.6707, |
| "step": 98 |
| }, |
| { |
| "epoch": 2.3372549019607844, |
| "grad_norm": 0.5045623779296875, |
| "learning_rate": 1.3437582221074574e-06, |
| "loss": 0.7924, |
| "step": 99 |
| }, |
| { |
| "epoch": 2.3607843137254902, |
| "grad_norm": 0.48436567187309265, |
| "learning_rate": 1.250364076648894e-06, |
| "loss": 0.7384, |
| "step": 100 |
| }, |
| { |
| "epoch": 2.384313725490196, |
| "grad_norm": 0.3896447718143463, |
| "learning_rate": 1.1598679685917901e-06, |
| "loss": 0.6666, |
| "step": 101 |
| }, |
| { |
| "epoch": 2.407843137254902, |
| "grad_norm": 0.40689510107040405, |
| "learning_rate": 1.0723398410216085e-06, |
| "loss": 0.8292, |
| "step": 102 |
| }, |
| { |
| "epoch": 2.431372549019608, |
| "grad_norm": 0.40202853083610535, |
| "learning_rate": 9.878473431161767e-07, |
| "loss": 0.6669, |
| "step": 103 |
| }, |
| { |
| "epoch": 2.454901960784314, |
| "grad_norm": 0.3605956733226776, |
| "learning_rate": 9.064557778606631e-07, |
| "loss": 0.6017, |
| "step": 104 |
| }, |
| { |
| "epoch": 2.4784313725490197, |
| "grad_norm": 0.4316107928752899, |
| "learning_rate": 8.282280515758639e-07, |
| "loss": 0.7825, |
| "step": 105 |
| }, |
| { |
| "epoch": 2.5019607843137255, |
| "grad_norm": 0.46594148874282837, |
| "learning_rate": 7.532246252988617e-07, |
| "loss": 0.7446, |
| "step": 106 |
| }, |
| { |
| "epoch": 2.5254901960784313, |
| "grad_norm": 0.42160096764564514, |
| "learning_rate": 6.815034680535915e-07, |
| "loss": 0.713, |
| "step": 107 |
| }, |
| { |
| "epoch": 2.549019607843137, |
| "grad_norm": 0.4095713794231415, |
| "learning_rate": 6.131200120474512e-07, |
| "loss": 0.7409, |
| "step": 108 |
| }, |
| { |
| "epoch": 2.572549019607843, |
| "grad_norm": 0.40359240770339966, |
| "learning_rate": 5.481271098285818e-07, |
| "loss": 0.7501, |
| "step": 109 |
| }, |
| { |
| "epoch": 2.596078431372549, |
| "grad_norm": 0.3566288352012634, |
| "learning_rate": 4.865749934369224e-07, |
| "loss": 0.6082, |
| "step": 110 |
| }, |
| { |
| "epoch": 2.619607843137255, |
| "grad_norm": 0.417287141084671, |
| "learning_rate": 4.2851123558061927e-07, |
| "loss": 0.7517, |
| "step": 111 |
| }, |
| { |
| "epoch": 2.6431372549019607, |
| "grad_norm": 0.36074298620224, |
| "learning_rate": 3.739807128677986e-07, |
| "loss": 0.6589, |
| "step": 112 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.37484005093574524, |
| "learning_rate": 3.230255711220992e-07, |
| "loss": 0.7009, |
| "step": 113 |
| }, |
| { |
| "epoch": 2.6901960784313728, |
| "grad_norm": 0.3959641754627228, |
| "learning_rate": 2.756851928088056e-07, |
| "loss": 0.7579, |
| "step": 114 |
| }, |
| { |
| "epoch": 2.7137254901960786, |
| "grad_norm": 0.3696475625038147, |
| "learning_rate": 2.3199616659672352e-07, |
| "loss": 0.8004, |
| "step": 115 |
| }, |
| { |
| "epoch": 2.7372549019607844, |
| "grad_norm": 0.3429110646247864, |
| "learning_rate": 1.9199225907935492e-07, |
| "loss": 0.6913, |
| "step": 116 |
| }, |
| { |
| "epoch": 2.76078431372549, |
| "grad_norm": 0.36563417315483093, |
| "learning_rate": 1.5570438867719695e-07, |
| "loss": 0.6749, |
| "step": 117 |
| }, |
| { |
| "epoch": 2.784313725490196, |
| "grad_norm": 0.41839203238487244, |
| "learning_rate": 1.2316060174136e-07, |
| "loss": 0.93, |
| "step": 118 |
| }, |
| { |
| "epoch": 2.8078431372549018, |
| "grad_norm": 0.329913854598999, |
| "learning_rate": 9.43860508769645e-08, |
| "loss": 0.5853, |
| "step": 119 |
| }, |
| { |
| "epoch": 2.831372549019608, |
| "grad_norm": 0.44181394577026367, |
| "learning_rate": 6.940297550306895e-08, |
| "loss": 0.7548, |
| "step": 120 |
| }, |
| { |
| "epoch": 2.854901960784314, |
| "grad_norm": 0.3787192702293396, |
| "learning_rate": 4.823068466415615e-08, |
| "loss": 0.7454, |
| "step": 121 |
| }, |
| { |
| "epoch": 2.8784313725490196, |
| "grad_norm": 0.4256850481033325, |
| "learning_rate": 3.088554210646133e-08, |
| "loss": 0.8, |
| "step": 122 |
| }, |
| { |
| "epoch": 2.9019607843137254, |
| "grad_norm": 0.3509824573993683, |
| "learning_rate": 1.7380953630678488e-08, |
| "loss": 0.7289, |
| "step": 123 |
| }, |
| { |
| "epoch": 2.9254901960784316, |
| "grad_norm": 0.4128064513206482, |
| "learning_rate": 7.727356730820035e-09, |
| "loss": 0.6974, |
| "step": 124 |
| }, |
| { |
| "epoch": 2.9490196078431374, |
| "grad_norm": 0.36814892292022705, |
| "learning_rate": 1.9322125272297488e-09, |
| "loss": 0.765, |
| "step": 125 |
| }, |
| { |
| "epoch": 2.9725490196078432, |
| "grad_norm": 0.3678930401802063, |
| "learning_rate": 0.0, |
| "loss": 0.6713, |
| "step": 126 |
| }, |
| { |
| "epoch": 2.9725490196078432, |
| "step": 126, |
| "total_flos": 121055548211200.0, |
| "train_loss": 0.7961941848671625, |
| "train_runtime": 4090.1751, |
| "train_samples_per_second": 2.985, |
| "train_steps_per_second": 0.031 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 126, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 121055548211200.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |