{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 578,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.996308123523995e-05,
      "loss": 2.529,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.98524339805751e-05,
      "loss": 2.3135,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.966838503280275e-05,
      "loss": 2.0123,
      "step": 30
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.941147798070748e-05,
      "loss": 1.8671,
      "step": 40
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9082471599571015e-05,
      "loss": 1.8716,
      "step": 50
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8682337610128526e-05,
      "loss": 1.8011,
      "step": 60
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.821225780859032e-05,
      "loss": 1.6831,
      "step": 70
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7673620576205294e-05,
      "loss": 1.6496,
      "step": 80
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7131541204339604e-05,
      "loss": 1.5882,
      "step": 90
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.646719126488686e-05,
      "loss": 1.5674,
      "step": 100
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.573943795068471e-05,
      "loss": 1.5274,
      "step": 110
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.4950430682006e-05,
      "loss": 1.503,
      "step": 120
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.410249979275041e-05,
      "loss": 1.5161,
      "step": 130
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.319814964780064e-05,
      "loss": 1.4715,
      "step": 140
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.2240051246377613e-05,
      "loss": 1.4587,
      "step": 150
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.123103433324129e-05,
      "loss": 1.3959,
      "step": 160
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.017407904103607e-05,
      "loss": 1.3988,
      "step": 170
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.907230708846552e-05,
      "loss": 1.3834,
      "step": 180
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.7928972560292554e-05,
      "loss": 1.4369,
      "step": 190
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.674745229639617e-05,
      "loss": 1.4171,
      "step": 200
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.553123591827093e-05,
      "loss": 1.4079,
      "step": 210
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.428391552242576e-05,
      "loss": 1.2868,
      "step": 220
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.300917507112261e-05,
      "loss": 1.3193,
      "step": 230
    },
    {
      "epoch": 0.42,
      "learning_rate": 3.171077951178964e-05,
      "loss": 1.3231,
      "step": 240
    },
    {
      "epoch": 0.43,
      "learning_rate": 3.039256365724447e-05,
      "loss": 1.3896,
      "step": 250
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.905842085957007e-05,
      "loss": 1.3857,
      "step": 260
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.7712291511094725e-05,
      "loss": 1.3863,
      "step": 270
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.6358151406438674e-05,
      "loss": 1.4821,
      "step": 280
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5e-05,
      "loss": 1.3522,
      "step": 290
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3641848593561322e-05,
      "loss": 1.312,
      "step": 300
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.228770848890528e-05,
      "loss": 1.3254,
      "step": 310
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.0941579140429933e-05,
      "loss": 1.3726,
      "step": 320
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.960743634275554e-05,
      "loss": 1.2926,
      "step": 330
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.828922048821037e-05,
      "loss": 1.3776,
      "step": 340
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6990824928877393e-05,
      "loss": 1.4351,
      "step": 350
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.5716084477574245e-05,
      "loss": 1.3105,
      "step": 360
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.4468764081729063e-05,
      "loss": 1.3345,
      "step": 370
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.3252547703603829e-05,
      "loss": 1.3971,
      "step": 380
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.207102743970745e-05,
      "loss": 1.2962,
      "step": 390
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.0927692911534476e-05,
      "loss": 1.3572,
      "step": 400
    },
    {
      "epoch": 0.71,
      "learning_rate": 9.825920958963936e-06,
      "loss": 1.318,
      "step": 410
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.768965666758704e-06,
      "loss": 1.3644,
      "step": 420
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.759948753622389e-06,
      "loss": 1.4429,
      "step": 430
    },
    {
      "epoch": 0.76,
      "learning_rate": 6.801850352199371e-06,
      "loss": 1.3241,
      "step": 440
    },
    {
      "epoch": 0.78,
      "learning_rate": 5.897500207249593e-06,
      "loss": 1.3219,
      "step": 450
    },
    {
      "epoch": 0.8,
      "learning_rate": 5.049569317994013e-06,
      "loss": 1.2544,
      "step": 460
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.260562049315301e-06,
      "loss": 1.4059,
      "step": 470
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.5328087351131495e-06,
      "loss": 1.3633,
      "step": 480
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.8684587956604e-06,
      "loss": 1.2363,
      "step": 490
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.2694743892876873e-06,
      "loss": 1.3308,
      "step": 500
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.7376246171465188e-06,
      "loss": 1.3525,
      "step": 510
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.2744802981669217e-06,
      "loss": 1.3765,
      "step": 520
    },
    {
      "epoch": 0.92,
      "learning_rate": 8.814093296418885e-07,
      "loss": 1.3155,
      "step": 530
    },
    {
      "epoch": 0.93,
      "learning_rate": 5.595726471410961e-07,
      "loss": 1.3324,
      "step": 540
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.099207956863359e-07,
      "loss": 1.3114,
      "step": 550
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.3319112231567344e-07,
      "loss": 1.2908,
      "step": 560
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.990559832808659e-08,
      "loss": 1.3837,
      "step": 570
    },
    {
      "epoch": 1.0,
      "step": 578,
      "total_flos": 8.80125790828626e+16,
      "train_loss": 1.4581370320699618,
      "train_runtime": 1932.4895,
      "train_samples_per_second": 4.782,
      "train_steps_per_second": 0.299
    }
  ],
  "max_steps": 578,
  "num_train_epochs": 1,
  "total_flos": 8.80125790828626e+16,
  "trial_name": null,
  "trial_params": null
}