| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9904153354632586, |
| "eval_steps": 500, |
| "global_step": 78, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.038338658146964855, |
| "grad_norm": 6.268963813781738, |
| "learning_rate": 1.25e-06, |
| "loss": 1.2115, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.07667731629392971, |
| "grad_norm": 6.341134548187256, |
| "learning_rate": 2.5e-06, |
| "loss": 1.2297, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.11501597444089456, |
| "grad_norm": 6.060571670532227, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 1.2244, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.15335463258785942, |
| "grad_norm": 5.834828853607178, |
| "learning_rate": 5e-06, |
| "loss": 1.2321, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.19169329073482427, |
| "grad_norm": 4.031907081604004, |
| "learning_rate": 6.25e-06, |
| "loss": 1.1354, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.23003194888178913, |
| "grad_norm": 2.566441535949707, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 1.1373, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.268370607028754, |
| "grad_norm": 4.936546802520752, |
| "learning_rate": 8.750000000000001e-06, |
| "loss": 1.1164, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.30670926517571884, |
| "grad_norm": 5.281982421875, |
| "learning_rate": 1e-05, |
| "loss": 1.1487, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.3450479233226837, |
| "grad_norm": 4.784050464630127, |
| "learning_rate": 9.994965332706574e-06, |
| "loss": 1.1092, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.38338658146964855, |
| "grad_norm": 4.35185432434082, |
| "learning_rate": 9.979871469976197e-06, |
| "loss": 1.0593, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.4217252396166134, |
| "grad_norm": 3.066632032394409, |
| "learning_rate": 9.954748808839675e-06, |
| "loss": 1.0775, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.46006389776357826, |
| "grad_norm": 2.1742818355560303, |
| "learning_rate": 9.91964794299315e-06, |
| "loss": 1.0507, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.4984025559105431, |
| "grad_norm": 2.0748846530914307, |
| "learning_rate": 9.874639560909118e-06, |
| "loss": 1.0538, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.536741214057508, |
| "grad_norm": 1.7165900468826294, |
| "learning_rate": 9.819814303479268e-06, |
| "loss": 1.0135, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.5750798722044729, |
| "grad_norm": 1.1524348258972168, |
| "learning_rate": 9.755282581475769e-06, |
| "loss": 0.9451, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.6134185303514377, |
| "grad_norm": 1.1045109033584595, |
| "learning_rate": 9.681174353198687e-06, |
| "loss": 0.9612, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.6517571884984026, |
| "grad_norm": 1.0113556385040283, |
| "learning_rate": 9.597638862757255e-06, |
| "loss": 0.933, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.6900958466453674, |
| "grad_norm": 1.0217912197113037, |
| "learning_rate": 9.504844339512096e-06, |
| "loss": 0.952, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.7284345047923323, |
| "grad_norm": 0.9327982068061829, |
| "learning_rate": 9.40297765928369e-06, |
| "loss": 0.9445, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.7667731629392971, |
| "grad_norm": 1.1761642694473267, |
| "learning_rate": 9.292243968009332e-06, |
| "loss": 0.9118, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.805111821086262, |
| "grad_norm": 1.0297203063964844, |
| "learning_rate": 9.172866268606514e-06, |
| "loss": 0.9449, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.8434504792332268, |
| "grad_norm": 0.89028000831604, |
| "learning_rate": 9.045084971874738e-06, |
| "loss": 0.9324, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.8817891373801917, |
| "grad_norm": 0.9008014798164368, |
| "learning_rate": 8.90915741234015e-06, |
| "loss": 0.9171, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.9201277955271565, |
| "grad_norm": 0.811389684677124, |
| "learning_rate": 8.765357330018056e-06, |
| "loss": 0.8944, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.9584664536741214, |
| "grad_norm": 0.7690094113349915, |
| "learning_rate": 8.613974319136959e-06, |
| "loss": 0.9277, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.9968051118210862, |
| "grad_norm": 0.797437310218811, |
| "learning_rate": 8.455313244934324e-06, |
| "loss": 0.8892, |
| "step": 26 |
| }, |
| { |
| "epoch": 1.035143769968051, |
| "grad_norm": 2.0572807788848877, |
| "learning_rate": 8.289693629698564e-06, |
| "loss": 1.6425, |
| "step": 27 |
| }, |
| { |
| "epoch": 1.073482428115016, |
| "grad_norm": 0.8024719953536987, |
| "learning_rate": 8.117449009293668e-06, |
| "loss": 0.8882, |
| "step": 28 |
| }, |
| { |
| "epoch": 1.1118210862619808, |
| "grad_norm": 0.732552170753479, |
| "learning_rate": 7.938926261462366e-06, |
| "loss": 0.8174, |
| "step": 29 |
| }, |
| { |
| "epoch": 1.1501597444089458, |
| "grad_norm": 0.7644696831703186, |
| "learning_rate": 7.754484907260513e-06, |
| "loss": 0.8943, |
| "step": 30 |
| }, |
| { |
| "epoch": 1.1884984025559104, |
| "grad_norm": 0.8279052972793579, |
| "learning_rate": 7.564496387029532e-06, |
| "loss": 0.9146, |
| "step": 31 |
| }, |
| { |
| "epoch": 1.2268370607028753, |
| "grad_norm": 0.6625746488571167, |
| "learning_rate": 7.369343312364994e-06, |
| "loss": 0.8061, |
| "step": 32 |
| }, |
| { |
| "epoch": 1.2651757188498403, |
| "grad_norm": 0.8740842342376709, |
| "learning_rate": 7.169418695587791e-06, |
| "loss": 0.9283, |
| "step": 33 |
| }, |
| { |
| "epoch": 1.3035143769968052, |
| "grad_norm": 1.0007681846618652, |
| "learning_rate": 6.965125158269619e-06, |
| "loss": 0.8699, |
| "step": 34 |
| }, |
| { |
| "epoch": 1.34185303514377, |
| "grad_norm": 0.7899899482727051, |
| "learning_rate": 6.7568741204067145e-06, |
| "loss": 0.8696, |
| "step": 35 |
| }, |
| { |
| "epoch": 1.3801916932907348, |
| "grad_norm": 0.8275483846664429, |
| "learning_rate": 6.545084971874738e-06, |
| "loss": 0.883, |
| "step": 36 |
| }, |
| { |
| "epoch": 1.4185303514376997, |
| "grad_norm": 0.6930125951766968, |
| "learning_rate": 6.330184227833376e-06, |
| "loss": 0.844, |
| "step": 37 |
| }, |
| { |
| "epoch": 1.4568690095846646, |
| "grad_norm": 0.9502591490745544, |
| "learning_rate": 6.112604669781572e-06, |
| "loss": 0.9176, |
| "step": 38 |
| }, |
| { |
| "epoch": 1.4952076677316293, |
| "grad_norm": 0.7643057703971863, |
| "learning_rate": 5.892784473993184e-06, |
| "loss": 0.8606, |
| "step": 39 |
| }, |
| { |
| "epoch": 1.5335463258785942, |
| "grad_norm": 0.6755321025848389, |
| "learning_rate": 5.671166329088278e-06, |
| "loss": 0.8542, |
| "step": 40 |
| }, |
| { |
| "epoch": 1.571884984025559, |
| "grad_norm": 0.6078510880470276, |
| "learning_rate": 5.448196544517168e-06, |
| "loss": 0.8272, |
| "step": 41 |
| }, |
| { |
| "epoch": 1.610223642172524, |
| "grad_norm": 0.6227731704711914, |
| "learning_rate": 5.224324151752575e-06, |
| "loss": 0.8341, |
| "step": 42 |
| }, |
| { |
| "epoch": 1.648562300319489, |
| "grad_norm": 0.6520214676856995, |
| "learning_rate": 5e-06, |
| "loss": 0.8258, |
| "step": 43 |
| }, |
| { |
| "epoch": 1.6869009584664538, |
| "grad_norm": 0.5231408476829529, |
| "learning_rate": 4.775675848247427e-06, |
| "loss": 0.8092, |
| "step": 44 |
| }, |
| { |
| "epoch": 1.7252396166134185, |
| "grad_norm": 0.5697768926620483, |
| "learning_rate": 4.551803455482833e-06, |
| "loss": 0.8257, |
| "step": 45 |
| }, |
| { |
| "epoch": 1.7635782747603834, |
| "grad_norm": 0.600039005279541, |
| "learning_rate": 4.3288336709117246e-06, |
| "loss": 0.9231, |
| "step": 46 |
| }, |
| { |
| "epoch": 1.8019169329073481, |
| "grad_norm": 0.552922248840332, |
| "learning_rate": 4.107215526006818e-06, |
| "loss": 0.8637, |
| "step": 47 |
| }, |
| { |
| "epoch": 1.840255591054313, |
| "grad_norm": 0.5337823629379272, |
| "learning_rate": 3.887395330218429e-06, |
| "loss": 0.8697, |
| "step": 48 |
| }, |
| { |
| "epoch": 1.878594249201278, |
| "grad_norm": 0.543074369430542, |
| "learning_rate": 3.669815772166625e-06, |
| "loss": 0.8528, |
| "step": 49 |
| }, |
| { |
| "epoch": 1.9169329073482428, |
| "grad_norm": 0.49268418550491333, |
| "learning_rate": 3.4549150281252635e-06, |
| "loss": 0.8719, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.9552715654952078, |
| "grad_norm": 0.5731656551361084, |
| "learning_rate": 3.2431258795932863e-06, |
| "loss": 0.8398, |
| "step": 51 |
| }, |
| { |
| "epoch": 1.9936102236421727, |
| "grad_norm": 0.5479597449302673, |
| "learning_rate": 3.0348748417303826e-06, |
| "loss": 0.8779, |
| "step": 52 |
| }, |
| { |
| "epoch": 2.0319488817891376, |
| "grad_norm": 1.617209792137146, |
| "learning_rate": 2.83058130441221e-06, |
| "loss": 1.4471, |
| "step": 53 |
| }, |
| { |
| "epoch": 2.070287539936102, |
| "grad_norm": 0.4556597173213959, |
| "learning_rate": 2.6306566876350072e-06, |
| "loss": 0.8478, |
| "step": 54 |
| }, |
| { |
| "epoch": 2.108626198083067, |
| "grad_norm": 0.47189801931381226, |
| "learning_rate": 2.43550361297047e-06, |
| "loss": 0.6975, |
| "step": 55 |
| }, |
| { |
| "epoch": 2.146964856230032, |
| "grad_norm": 0.530375063419342, |
| "learning_rate": 2.245515092739488e-06, |
| "loss": 0.8813, |
| "step": 56 |
| }, |
| { |
| "epoch": 2.1853035143769968, |
| "grad_norm": 0.4751721918582916, |
| "learning_rate": 2.061073738537635e-06, |
| "loss": 0.7837, |
| "step": 57 |
| }, |
| { |
| "epoch": 2.2236421725239617, |
| "grad_norm": 0.4963734745979309, |
| "learning_rate": 1.8825509907063328e-06, |
| "loss": 0.813, |
| "step": 58 |
| }, |
| { |
| "epoch": 2.2619808306709266, |
| "grad_norm": 0.4415941536426544, |
| "learning_rate": 1.7103063703014372e-06, |
| "loss": 0.8443, |
| "step": 59 |
| }, |
| { |
| "epoch": 2.3003194888178915, |
| "grad_norm": 0.46203547716140747, |
| "learning_rate": 1.544686755065677e-06, |
| "loss": 0.8034, |
| "step": 60 |
| }, |
| { |
| "epoch": 2.3386581469648564, |
| "grad_norm": 0.46348705887794495, |
| "learning_rate": 1.3860256808630429e-06, |
| "loss": 0.8218, |
| "step": 61 |
| }, |
| { |
| "epoch": 2.376996805111821, |
| "grad_norm": 0.5105081796646118, |
| "learning_rate": 1.234642669981946e-06, |
| "loss": 0.8328, |
| "step": 62 |
| }, |
| { |
| "epoch": 2.415335463258786, |
| "grad_norm": 0.4578910171985626, |
| "learning_rate": 1.0908425876598512e-06, |
| "loss": 0.8163, |
| "step": 63 |
| }, |
| { |
| "epoch": 2.4536741214057507, |
| "grad_norm": 0.43735674023628235, |
| "learning_rate": 9.549150281252633e-07, |
| "loss": 0.8503, |
| "step": 64 |
| }, |
| { |
| "epoch": 2.4920127795527156, |
| "grad_norm": 0.4174916744232178, |
| "learning_rate": 8.271337313934869e-07, |
| "loss": 0.8144, |
| "step": 65 |
| }, |
| { |
| "epoch": 2.5303514376996805, |
| "grad_norm": 0.4164718985557556, |
| "learning_rate": 7.077560319906696e-07, |
| "loss": 0.8012, |
| "step": 66 |
| }, |
| { |
| "epoch": 2.5686900958466454, |
| "grad_norm": 0.4239489734172821, |
| "learning_rate": 5.9702234071631e-07, |
| "loss": 0.8369, |
| "step": 67 |
| }, |
| { |
| "epoch": 2.6070287539936103, |
| "grad_norm": 0.4047086834907532, |
| "learning_rate": 4.951556604879049e-07, |
| "loss": 0.8044, |
| "step": 68 |
| }, |
| { |
| "epoch": 2.6453674121405752, |
| "grad_norm": 0.46145617961883545, |
| "learning_rate": 4.0236113724274716e-07, |
| "loss": 0.8484, |
| "step": 69 |
| }, |
| { |
| "epoch": 2.68370607028754, |
| "grad_norm": 0.40475496649742126, |
| "learning_rate": 3.18825646801314e-07, |
| "loss": 0.8267, |
| "step": 70 |
| }, |
| { |
| "epoch": 2.722044728434505, |
| "grad_norm": 0.40019193291664124, |
| "learning_rate": 2.447174185242324e-07, |
| "loss": 0.7495, |
| "step": 71 |
| }, |
| { |
| "epoch": 2.7603833865814695, |
| "grad_norm": 0.5259660482406616, |
| "learning_rate": 1.801856965207338e-07, |
| "loss": 0.8841, |
| "step": 72 |
| }, |
| { |
| "epoch": 2.7987220447284344, |
| "grad_norm": 0.40922901034355164, |
| "learning_rate": 1.253604390908819e-07, |
| "loss": 0.7166, |
| "step": 73 |
| }, |
| { |
| "epoch": 2.8370607028753994, |
| "grad_norm": 0.4288291335105896, |
| "learning_rate": 8.035205700685167e-08, |
| "loss": 0.8212, |
| "step": 74 |
| }, |
| { |
| "epoch": 2.8753993610223643, |
| "grad_norm": 0.48746126890182495, |
| "learning_rate": 4.52511911603265e-08, |
| "loss": 0.8516, |
| "step": 75 |
| }, |
| { |
| "epoch": 2.913738019169329, |
| "grad_norm": 0.41395363211631775, |
| "learning_rate": 2.012853002380466e-08, |
| "loss": 0.7457, |
| "step": 76 |
| }, |
| { |
| "epoch": 2.952076677316294, |
| "grad_norm": 0.46223482489585876, |
| "learning_rate": 5.034667293427053e-09, |
| "loss": 0.8133, |
| "step": 77 |
| }, |
| { |
| "epoch": 2.9904153354632586, |
| "grad_norm": 0.3912806212902069, |
| "learning_rate": 0.0, |
| "loss": 0.7707, |
| "step": 78 |
| }, |
| { |
| "epoch": 2.9904153354632586, |
| "step": 78, |
| "total_flos": 74381350977536.0, |
| "train_loss": 0.9216354114887042, |
| "train_runtime": 4315.2973, |
| "train_samples_per_second": 1.738, |
| "train_steps_per_second": 0.018 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 78, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 74381350977536.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |