| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 279, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.010752688172043012, |
| "grad_norm": 5.117406670103657, |
| "learning_rate": 0.0, |
| "loss": 0.8565, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.021505376344086023, |
| "grad_norm": 3.932349183270455, |
| "learning_rate": 3.5714285714285716e-07, |
| "loss": 0.7654, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.03225806451612903, |
| "grad_norm": 4.430618628715762, |
| "learning_rate": 7.142857142857143e-07, |
| "loss": 0.8602, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.043010752688172046, |
| "grad_norm": 4.629935061064256, |
| "learning_rate": 1.0714285714285714e-06, |
| "loss": 0.8179, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.053763440860215055, |
| "grad_norm": 4.607689206667246, |
| "learning_rate": 1.4285714285714286e-06, |
| "loss": 0.7984, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.06451612903225806, |
| "grad_norm": 4.470859498641417, |
| "learning_rate": 1.7857142857142859e-06, |
| "loss": 0.8393, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.07526881720430108, |
| "grad_norm": 3.615495688035928, |
| "learning_rate": 2.1428571428571427e-06, |
| "loss": 0.7408, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.08602150537634409, |
| "grad_norm": 3.6184582310708753, |
| "learning_rate": 2.5e-06, |
| "loss": 0.7991, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0967741935483871, |
| "grad_norm": 3.5286647768187804, |
| "learning_rate": 2.8571428571428573e-06, |
| "loss": 0.7439, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.10752688172043011, |
| "grad_norm": 3.2631493323575773, |
| "learning_rate": 3.2142857142857147e-06, |
| "loss": 0.7656, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.11827956989247312, |
| "grad_norm": 1.9183483696552464, |
| "learning_rate": 3.5714285714285718e-06, |
| "loss": 0.6424, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.12903225806451613, |
| "grad_norm": 2.5758188598022036, |
| "learning_rate": 3.928571428571429e-06, |
| "loss": 0.6746, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.13978494623655913, |
| "grad_norm": 2.711107640562582, |
| "learning_rate": 4.2857142857142855e-06, |
| "loss": 0.6786, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.15053763440860216, |
| "grad_norm": 2.2557127395574335, |
| "learning_rate": 4.642857142857144e-06, |
| "loss": 0.6464, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.16129032258064516, |
| "grad_norm": 2.08589822549775, |
| "learning_rate": 5e-06, |
| "loss": 0.5924, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.17204301075268819, |
| "grad_norm": 2.442258395861131, |
| "learning_rate": 5.357142857142857e-06, |
| "loss": 0.6771, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.1827956989247312, |
| "grad_norm": 2.4088790285819353, |
| "learning_rate": 5.7142857142857145e-06, |
| "loss": 0.6609, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.1935483870967742, |
| "grad_norm": 2.599278835350385, |
| "learning_rate": 6.071428571428571e-06, |
| "loss": 0.665, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.20430107526881722, |
| "grad_norm": 2.135693738895623, |
| "learning_rate": 6.4285714285714295e-06, |
| "loss": 0.5946, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.21505376344086022, |
| "grad_norm": 2.0880720029245343, |
| "learning_rate": 6.785714285714287e-06, |
| "loss": 0.6027, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.22580645161290322, |
| "grad_norm": 1.7878249901331582, |
| "learning_rate": 7.1428571428571436e-06, |
| "loss": 0.6523, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.23655913978494625, |
| "grad_norm": 1.8499308052576442, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 0.5775, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.24731182795698925, |
| "grad_norm": 1.545055962271408, |
| "learning_rate": 7.857142857142858e-06, |
| "loss": 0.5343, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.25806451612903225, |
| "grad_norm": 1.5990557909426337, |
| "learning_rate": 8.214285714285714e-06, |
| "loss": 0.6303, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.26881720430107525, |
| "grad_norm": 1.5047883914372557, |
| "learning_rate": 8.571428571428571e-06, |
| "loss": 0.5673, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.27956989247311825, |
| "grad_norm": 1.660959104374015, |
| "learning_rate": 8.92857142857143e-06, |
| "loss": 0.5834, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.2903225806451613, |
| "grad_norm": 1.4855731115284816, |
| "learning_rate": 9.285714285714288e-06, |
| "loss": 0.5861, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.3010752688172043, |
| "grad_norm": 1.5890489850514156, |
| "learning_rate": 9.642857142857144e-06, |
| "loss": 0.58, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.3118279569892473, |
| "grad_norm": 1.3375504302217296, |
| "learning_rate": 1e-05, |
| "loss": 0.5669, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.3225806451612903, |
| "grad_norm": 1.4010773743864648, |
| "learning_rate": 9.999608360361114e-06, |
| "loss": 0.551, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.3333333333333333, |
| "grad_norm": 1.3303386154338483, |
| "learning_rate": 9.998433502797097e-06, |
| "loss": 0.6109, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.34408602150537637, |
| "grad_norm": 1.3895786100086411, |
| "learning_rate": 9.996475611356265e-06, |
| "loss": 0.5155, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.3548387096774194, |
| "grad_norm": 1.211244402418308, |
| "learning_rate": 9.993734992753777e-06, |
| "loss": 0.5167, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3655913978494624, |
| "grad_norm": 1.205621464067494, |
| "learning_rate": 9.990212076323587e-06, |
| "loss": 0.626, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.3763440860215054, |
| "grad_norm": 1.281327619636724, |
| "learning_rate": 9.98590741395118e-06, |
| "loss": 0.5301, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.3870967741935484, |
| "grad_norm": 1.4425953234642075, |
| "learning_rate": 9.980821679987125e-06, |
| "loss": 0.5705, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.3978494623655914, |
| "grad_norm": 1.3476830260596382, |
| "learning_rate": 9.974955671141425e-06, |
| "loss": 0.5742, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.40860215053763443, |
| "grad_norm": 1.3027265736180367, |
| "learning_rate": 9.968310306358715e-06, |
| "loss": 0.5443, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.41935483870967744, |
| "grad_norm": 1.3618324591712858, |
| "learning_rate": 9.960886626674302e-06, |
| "loss": 0.6328, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.43010752688172044, |
| "grad_norm": 1.3924706399344635, |
| "learning_rate": 9.952685795051078e-06, |
| "loss": 0.531, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.44086021505376344, |
| "grad_norm": 1.3681689870094362, |
| "learning_rate": 9.943709096197334e-06, |
| "loss": 0.5771, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.45161290322580644, |
| "grad_norm": 1.169520090357751, |
| "learning_rate": 9.933957936365515e-06, |
| "loss": 0.6155, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.46236559139784944, |
| "grad_norm": 1.2592535480126792, |
| "learning_rate": 9.9234338431319e-06, |
| "loss": 0.5614, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.4731182795698925, |
| "grad_norm": 1.188489938742699, |
| "learning_rate": 9.912138465157325e-06, |
| "loss": 0.5821, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.4838709677419355, |
| "grad_norm": 1.1821647113837632, |
| "learning_rate": 9.900073571928887e-06, |
| "loss": 0.5793, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.4946236559139785, |
| "grad_norm": 1.363884821931327, |
| "learning_rate": 9.887241053482756e-06, |
| "loss": 0.5629, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.5053763440860215, |
| "grad_norm": 1.3092899719841664, |
| "learning_rate": 9.87364292010809e-06, |
| "loss": 0.5235, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.5161290322580645, |
| "grad_norm": 1.3352852678722897, |
| "learning_rate": 9.859281302032107e-06, |
| "loss": 0.5151, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.5268817204301075, |
| "grad_norm": 1.2505866887814554, |
| "learning_rate": 9.844158449086372e-06, |
| "loss": 0.5452, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.5376344086021505, |
| "grad_norm": 1.4936981012829829, |
| "learning_rate": 9.828276730354353e-06, |
| "loss": 0.5463, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.5483870967741935, |
| "grad_norm": 1.2994897728299255, |
| "learning_rate": 9.811638633800287e-06, |
| "loss": 0.524, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.5591397849462365, |
| "grad_norm": 1.1528734529208322, |
| "learning_rate": 9.794246765879421e-06, |
| "loss": 0.5134, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.5698924731182796, |
| "grad_norm": 1.3662191872593852, |
| "learning_rate": 9.776103851129706e-06, |
| "loss": 0.5074, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.5806451612903226, |
| "grad_norm": 1.3197245386738852, |
| "learning_rate": 9.757212731744973e-06, |
| "loss": 0.4936, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.5913978494623656, |
| "grad_norm": 1.1826061568448532, |
| "learning_rate": 9.737576367129694e-06, |
| "loss": 0.5291, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.6021505376344086, |
| "grad_norm": 1.2279962807050635, |
| "learning_rate": 9.717197833435367e-06, |
| "loss": 0.5401, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.6129032258064516, |
| "grad_norm": 1.1622891891860894, |
| "learning_rate": 9.696080323078621e-06, |
| "loss": 0.5341, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.6236559139784946, |
| "grad_norm": 1.15595461480515, |
| "learning_rate": 9.67422714424111e-06, |
| "loss": 0.4811, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.6344086021505376, |
| "grad_norm": 1.1381808394142663, |
| "learning_rate": 9.651641720351262e-06, |
| "loss": 0.5126, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.6451612903225806, |
| "grad_norm": 1.2816655929131089, |
| "learning_rate": 9.628327589547977e-06, |
| "loss": 0.5147, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.6559139784946236, |
| "grad_norm": 1.176343266853311, |
| "learning_rate": 9.604288404126362e-06, |
| "loss": 0.533, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 1.3546093456513846, |
| "learning_rate": 9.579527929965581e-06, |
| "loss": 0.5972, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.6774193548387096, |
| "grad_norm": 1.3159036668873394, |
| "learning_rate": 9.554050045938893e-06, |
| "loss": 0.5211, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.6881720430107527, |
| "grad_norm": 1.3256416815543717, |
| "learning_rate": 9.52785874330602e-06, |
| "loss": 0.5664, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.6989247311827957, |
| "grad_norm": 1.137833912080069, |
| "learning_rate": 9.500958125087882e-06, |
| "loss": 0.52, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.7096774193548387, |
| "grad_norm": 1.3219928059082766, |
| "learning_rate": 9.473352405423845e-06, |
| "loss": 0.5192, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.7204301075268817, |
| "grad_norm": 1.2000849146820651, |
| "learning_rate": 9.445045908911536e-06, |
| "loss": 0.5093, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.7311827956989247, |
| "grad_norm": 1.0948499067842512, |
| "learning_rate": 9.416043069929389e-06, |
| "loss": 0.4891, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.7419354838709677, |
| "grad_norm": 1.1326577666179933, |
| "learning_rate": 9.386348431941953e-06, |
| "loss": 0.4979, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.7526881720430108, |
| "grad_norm": 1.1971786924652708, |
| "learning_rate": 9.355966646788152e-06, |
| "loss": 0.4716, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.7634408602150538, |
| "grad_norm": 1.1712532314666788, |
| "learning_rate": 9.324902473952529e-06, |
| "loss": 0.4697, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.7741935483870968, |
| "grad_norm": 1.174242198418526, |
| "learning_rate": 9.293160779819658e-06, |
| "loss": 0.5306, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.7849462365591398, |
| "grad_norm": 1.2287418966719115, |
| "learning_rate": 9.260746536911792e-06, |
| "loss": 0.5267, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.7956989247311828, |
| "grad_norm": 1.3303630719030897, |
| "learning_rate": 9.227664823109884e-06, |
| "loss": 0.5243, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.8064516129032258, |
| "grad_norm": 1.3096448496558815, |
| "learning_rate": 9.193920820858113e-06, |
| "loss": 0.4986, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.8172043010752689, |
| "grad_norm": 1.3030805645002468, |
| "learning_rate": 9.159519816352021e-06, |
| "loss": 0.5732, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.8279569892473119, |
| "grad_norm": 1.2151108401849033, |
| "learning_rate": 9.124467198710401e-06, |
| "loss": 0.5374, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.8387096774193549, |
| "grad_norm": 1.2507532704479085, |
| "learning_rate": 9.08876845913106e-06, |
| "loss": 0.5251, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.8494623655913979, |
| "grad_norm": 1.397611316956257, |
| "learning_rate": 9.052429190030589e-06, |
| "loss": 0.5325, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.8602150537634409, |
| "grad_norm": 1.2260544279462335, |
| "learning_rate": 9.015455084168279e-06, |
| "loss": 0.5008, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.8709677419354839, |
| "grad_norm": 1.219464661892977, |
| "learning_rate": 8.977851933754317e-06, |
| "loss": 0.5294, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.8817204301075269, |
| "grad_norm": 1.2576277212991538, |
| "learning_rate": 8.939625629542401e-06, |
| "loss": 0.5065, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.8924731182795699, |
| "grad_norm": 1.2231909063495303, |
| "learning_rate": 8.900782159906927e-06, |
| "loss": 0.5577, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.9032258064516129, |
| "grad_norm": 1.240487970070817, |
| "learning_rate": 8.861327609904859e-06, |
| "loss": 0.5607, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.9139784946236559, |
| "grad_norm": 1.1803222245984772, |
| "learning_rate": 8.821268160322482e-06, |
| "loss": 0.4559, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.9247311827956989, |
| "grad_norm": 1.1172971063227777, |
| "learning_rate": 8.780610086707149e-06, |
| "loss": 0.4524, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.9354838709677419, |
| "grad_norm": 1.1952992924028192, |
| "learning_rate": 8.739359758384162e-06, |
| "loss": 0.5183, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.946236559139785, |
| "grad_norm": 1.2423449341948143, |
| "learning_rate": 8.697523637458997e-06, |
| "loss": 0.6781, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.956989247311828, |
| "grad_norm": 1.223456106162861, |
| "learning_rate": 8.655108277804975e-06, |
| "loss": 0.4719, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.967741935483871, |
| "grad_norm": 1.2180325223266957, |
| "learning_rate": 8.612120324036548e-06, |
| "loss": 0.5298, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.978494623655914, |
| "grad_norm": 1.1908812611967874, |
| "learning_rate": 8.568566510468392e-06, |
| "loss": 0.4908, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.989247311827957, |
| "grad_norm": 1.1329246631137397, |
| "learning_rate": 8.524453660060434e-06, |
| "loss": 0.4884, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 1.1739661223625193, |
| "learning_rate": 8.479788683348996e-06, |
| "loss": 0.5282, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.010752688172043, |
| "grad_norm": 1.2895603002016933, |
| "learning_rate": 8.434578577364218e-06, |
| "loss": 0.4053, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.021505376344086, |
| "grad_norm": 1.2372418394561484, |
| "learning_rate": 8.388830424533935e-06, |
| "loss": 0.337, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.032258064516129, |
| "grad_norm": 1.1718375574305269, |
| "learning_rate": 8.342551391574165e-06, |
| "loss": 0.3569, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.043010752688172, |
| "grad_norm": 1.2444221432485463, |
| "learning_rate": 8.295748728366414e-06, |
| "loss": 0.3488, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.053763440860215, |
| "grad_norm": 1.5344510346454978, |
| "learning_rate": 8.248429766821925e-06, |
| "loss": 0.387, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.064516129032258, |
| "grad_norm": 1.83437388566894, |
| "learning_rate": 8.200601919733106e-06, |
| "loss": 0.3476, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.075268817204301, |
| "grad_norm": 1.1645823937573538, |
| "learning_rate": 8.15227267961226e-06, |
| "loss": 0.338, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.086021505376344, |
| "grad_norm": 1.07943356765298, |
| "learning_rate": 8.10344961751785e-06, |
| "loss": 0.3756, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.096774193548387, |
| "grad_norm": 1.136359022672489, |
| "learning_rate": 8.054140381868435e-06, |
| "loss": 0.3643, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.10752688172043, |
| "grad_norm": 1.1334256185564782, |
| "learning_rate": 8.004352697244516e-06, |
| "loss": 0.3537, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.118279569892473, |
| "grad_norm": 1.272152321916641, |
| "learning_rate": 7.954094363178421e-06, |
| "loss": 0.3851, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.129032258064516, |
| "grad_norm": 1.6926984652817842, |
| "learning_rate": 7.903373252932474e-06, |
| "loss": 0.3236, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.139784946236559, |
| "grad_norm": 1.1769426909749126, |
| "learning_rate": 7.852197312265592e-06, |
| "loss": 0.4107, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.1505376344086022, |
| "grad_norm": 1.2179134329370982, |
| "learning_rate": 7.800574558188548e-06, |
| "loss": 0.3655, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.1612903225806452, |
| "grad_norm": 1.1914193859243891, |
| "learning_rate": 7.748513077708044e-06, |
| "loss": 0.319, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.1720430107526882, |
| "grad_norm": 1.1623447008406969, |
| "learning_rate": 7.69602102655985e-06, |
| "loss": 0.3257, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.1827956989247312, |
| "grad_norm": 1.219276989748195, |
| "learning_rate": 7.643106627931148e-06, |
| "loss": 0.5513, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.1935483870967742, |
| "grad_norm": 1.246553411484507, |
| "learning_rate": 7.5897781711723215e-06, |
| "loss": 0.3581, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.2043010752688172, |
| "grad_norm": 1.1736474380398347, |
| "learning_rate": 7.536044010498396e-06, |
| "loss": 0.3513, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.2150537634408602, |
| "grad_norm": 1.2797115429626638, |
| "learning_rate": 7.48191256368028e-06, |
| "loss": 0.3443, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.2258064516129032, |
| "grad_norm": 1.197978350617973, |
| "learning_rate": 7.427392310726088e-06, |
| "loss": 0.3603, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.2365591397849462, |
| "grad_norm": 1.1727521029345562, |
| "learning_rate": 7.372491792552694e-06, |
| "loss": 0.3738, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.2473118279569892, |
| "grad_norm": 1.1707826597566307, |
| "learning_rate": 7.31721960964774e-06, |
| "loss": 0.3881, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.2580645161290323, |
| "grad_norm": 1.2094231500149508, |
| "learning_rate": 7.261584420722328e-06, |
| "loss": 0.5288, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.2688172043010753, |
| "grad_norm": 1.1809429787295744, |
| "learning_rate": 7.20559494135458e-06, |
| "loss": 0.3781, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.2795698924731183, |
| "grad_norm": 1.1434463967884962, |
| "learning_rate": 7.149259942624287e-06, |
| "loss": 0.3717, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.2903225806451613, |
| "grad_norm": 1.1430462929379024, |
| "learning_rate": 7.092588249738871e-06, |
| "loss": 0.373, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.3010752688172043, |
| "grad_norm": 1.4627760719280838, |
| "learning_rate": 7.03558874065087e-06, |
| "loss": 0.3809, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.3118279569892473, |
| "grad_norm": 1.182317091439007, |
| "learning_rate": 6.978270344667143e-06, |
| "loss": 0.3583, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.3225806451612903, |
| "grad_norm": 1.243697527950775, |
| "learning_rate": 6.920642041050055e-06, |
| "loss": 0.3207, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.3333333333333333, |
| "grad_norm": 1.2759794387408534, |
| "learning_rate": 6.862712857610812e-06, |
| "loss": 0.3633, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.3440860215053765, |
| "grad_norm": 1.1295106977198248, |
| "learning_rate": 6.804491869295207e-06, |
| "loss": 0.4274, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.3548387096774195, |
| "grad_norm": 1.1901126832185107, |
| "learning_rate": 6.745988196761976e-06, |
| "loss": 0.3779, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.3655913978494625, |
| "grad_norm": 1.3373142316442945, |
| "learning_rate": 6.687211004953992e-06, |
| "loss": 0.3618, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.3763440860215055, |
| "grad_norm": 1.116240494926489, |
| "learning_rate": 6.628169501662527e-06, |
| "loss": 0.3347, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.3870967741935485, |
| "grad_norm": 1.3073080378383377, |
| "learning_rate": 6.568872936084789e-06, |
| "loss": 0.3924, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.3978494623655915, |
| "grad_norm": 1.1658881438715565, |
| "learning_rate": 6.509330597374993e-06, |
| "loss": 0.3349, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.4086021505376345, |
| "grad_norm": 1.3285197694874702, |
| "learning_rate": 6.44955181318915e-06, |
| "loss": 0.383, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.4193548387096775, |
| "grad_norm": 1.1421963318126769, |
| "learning_rate": 6.389545948223841e-06, |
| "loss": 0.35, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.4301075268817205, |
| "grad_norm": 1.1800566601708844, |
| "learning_rate": 6.329322402749181e-06, |
| "loss": 0.36, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.4408602150537635, |
| "grad_norm": 1.181427392470128, |
| "learning_rate": 6.2688906111362115e-06, |
| "loss": 0.366, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.4516129032258065, |
| "grad_norm": 1.155888875463247, |
| "learning_rate": 6.208260040378946e-06, |
| "loss": 0.3497, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.4623655913978495, |
| "grad_norm": 1.1786815694111499, |
| "learning_rate": 6.147440188611324e-06, |
| "loss": 0.3537, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.4731182795698925, |
| "grad_norm": 1.2051193700127212, |
| "learning_rate": 6.0864405836192575e-06, |
| "loss": 0.3527, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.4838709677419355, |
| "grad_norm": 1.2284826404113511, |
| "learning_rate": 6.025270781348055e-06, |
| "loss": 0.3378, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.4946236559139785, |
| "grad_norm": 1.247829453357803, |
| "learning_rate": 5.963940364405425e-06, |
| "loss": 0.3696, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.5053763440860215, |
| "grad_norm": 1.201613514704844, |
| "learning_rate": 5.902458940560304e-06, |
| "loss": 0.3738, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.5161290322580645, |
| "grad_norm": 1.169777798872826, |
| "learning_rate": 5.8408361412377475e-06, |
| "loss": 0.3335, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.5268817204301075, |
| "grad_norm": 1.122966037731356, |
| "learning_rate": 5.779081620010104e-06, |
| "loss": 0.3232, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.5376344086021505, |
| "grad_norm": 1.194793658559918, |
| "learning_rate": 5.717205051084731e-06, |
| "loss": 0.3392, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.5483870967741935, |
| "grad_norm": 1.2783006705254338, |
| "learning_rate": 5.655216127788472e-06, |
| "loss": 0.3711, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.5591397849462365, |
| "grad_norm": 1.128328576854692, |
| "learning_rate": 5.593124561049141e-06, |
| "loss": 0.3435, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.5698924731182795, |
| "grad_norm": 1.191976250642684, |
| "learning_rate": 5.530940077874248e-06, |
| "loss": 0.3617, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.5806451612903225, |
| "grad_norm": 1.1652853933490293, |
| "learning_rate": 5.468672419827208e-06, |
| "loss": 0.3399, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.5913978494623655, |
| "grad_norm": 1.0915560758197622, |
| "learning_rate": 5.406331341501264e-06, |
| "loss": 0.339, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.6021505376344085, |
| "grad_norm": 1.1053574420353864, |
| "learning_rate": 5.34392660899138e-06, |
| "loss": 0.3207, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.6129032258064515, |
| "grad_norm": 1.1460690041968786, |
| "learning_rate": 5.281467998364314e-06, |
| "loss": 0.3308, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.6236559139784945, |
| "grad_norm": 1.1570813052418365, |
| "learning_rate": 5.218965294127155e-06, |
| "loss": 0.3501, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.6344086021505375, |
| "grad_norm": 1.2691321046241564, |
| "learning_rate": 5.156428287694508e-06, |
| "loss": 0.3542, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.6451612903225805, |
| "grad_norm": 1.1765795742693084, |
| "learning_rate": 5.093866775854618e-06, |
| "loss": 0.334, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.6559139784946235, |
| "grad_norm": 1.114227715473016, |
| "learning_rate": 5.03129055923465e-06, |
| "loss": 0.3124, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.6666666666666665, |
| "grad_norm": 1.2728424503735456, |
| "learning_rate": 4.968709440765352e-06, |
| "loss": 0.3608, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.6774193548387095, |
| "grad_norm": 1.2957106875665612, |
| "learning_rate": 4.906133224145384e-06, |
| "loss": 0.4535, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.6881720430107527, |
| "grad_norm": 1.1832693336037738, |
| "learning_rate": 4.843571712305493e-06, |
| "loss": 0.347, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.6989247311827957, |
| "grad_norm": 1.1976541159478242, |
| "learning_rate": 4.781034705872846e-06, |
| "loss": 0.3429, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.7096774193548387, |
| "grad_norm": 1.1965466685156962, |
| "learning_rate": 4.7185320016356865e-06, |
| "loss": 0.353, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.7204301075268817, |
| "grad_norm": 1.3091493751262704, |
| "learning_rate": 4.656073391008622e-06, |
| "loss": 0.3955, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.7311827956989247, |
| "grad_norm": 1.1853179338764237, |
| "learning_rate": 4.593668658498737e-06, |
| "loss": 0.3522, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.7419354838709677, |
| "grad_norm": 1.0940886869833208, |
| "learning_rate": 4.531327580172794e-06, |
| "loss": 0.3609, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.7526881720430108, |
| "grad_norm": 1.241588019056234, |
| "learning_rate": 4.469059922125753e-06, |
| "loss": 0.3518, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.7634408602150538, |
| "grad_norm": 1.3864224154338305, |
| "learning_rate": 4.4068754389508616e-06, |
| "loss": 0.4151, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.7741935483870968, |
| "grad_norm": 1.2098619933759385, |
| "learning_rate": 4.34478387221153e-06, |
| "loss": 0.348, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.7849462365591398, |
| "grad_norm": 1.2016121769392047, |
| "learning_rate": 4.282794948915271e-06, |
| "loss": 0.3336, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.7956989247311828, |
| "grad_norm": 1.2364712139446496, |
| "learning_rate": 4.220918379989898e-06, |
| "loss": 0.3815, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.8064516129032258, |
| "grad_norm": 1.1677279665442852, |
| "learning_rate": 4.159163858762255e-06, |
| "loss": 0.3284, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.817204301075269, |
| "grad_norm": 1.2119363388446562, |
| "learning_rate": 4.097541059439698e-06, |
| "loss": 0.3662, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.827956989247312, |
| "grad_norm": 1.1480161774722477, |
| "learning_rate": 4.036059635594578e-06, |
| "loss": 0.3589, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.838709677419355, |
| "grad_norm": 1.2274813656457675, |
| "learning_rate": 3.974729218651946e-06, |
| "loss": 0.3249, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.849462365591398, |
| "grad_norm": 1.2174884876788425, |
| "learning_rate": 3.913559416380743e-06, |
| "loss": 0.3605, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.860215053763441, |
| "grad_norm": 1.2173044748787643, |
| "learning_rate": 3.852559811388676e-06, |
| "loss": 0.3622, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.870967741935484, |
| "grad_norm": 1.0993262002539315, |
| "learning_rate": 3.791739959621054e-06, |
| "loss": 0.3159, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.881720430107527, |
| "grad_norm": 1.1781627168157496, |
| "learning_rate": 3.7311093888637906e-06, |
| "loss": 0.37, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.89247311827957, |
| "grad_norm": 1.120376466539403, |
| "learning_rate": 3.670677597250819e-06, |
| "loss": 0.3628, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.903225806451613, |
| "grad_norm": 1.1144596517150382, |
| "learning_rate": 3.6104540517761594e-06, |
| "loss": 0.3525, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.913978494623656, |
| "grad_norm": 1.1541300654588889, |
| "learning_rate": 3.55044818681085e-06, |
| "loss": 0.3224, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.924731182795699, |
| "grad_norm": 1.2475568699772912, |
| "learning_rate": 3.4906694026250075e-06, |
| "loss": 0.3474, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.935483870967742, |
| "grad_norm": 1.1498672361648068, |
| "learning_rate": 3.431127063915213e-06, |
| "loss": 0.3339, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.946236559139785, |
| "grad_norm": 1.2308326285216238, |
| "learning_rate": 3.371830498337475e-06, |
| "loss": 0.3123, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.956989247311828, |
| "grad_norm": 1.1895143093091571, |
| "learning_rate": 3.3127889950460094e-06, |
| "loss": 0.343, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.967741935483871, |
| "grad_norm": 1.1762159546277886, |
| "learning_rate": 3.254011803238026e-06, |
| "loss": 0.3402, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.978494623655914, |
| "grad_norm": 1.153653621278308, |
| "learning_rate": 3.195508130704795e-06, |
| "loss": 0.3766, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.989247311827957, |
| "grad_norm": 1.2087190097752418, |
| "learning_rate": 3.1372871423891894e-06, |
| "loss": 0.3827, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.089032118871395, |
| "learning_rate": 3.079357958949946e-06, |
| "loss": 0.3326, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.010752688172043, |
| "grad_norm": 1.2353249862535516, |
| "learning_rate": 3.021729655332858e-06, |
| "loss": 0.2291, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.021505376344086, |
| "grad_norm": 1.1205213073958726, |
| "learning_rate": 2.9644112593491315e-06, |
| "loss": 0.2793, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.032258064516129, |
| "grad_norm": 1.0679682742372525, |
| "learning_rate": 2.90741175026113e-06, |
| "loss": 0.2318, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.043010752688172, |
| "grad_norm": 1.0416057740465434, |
| "learning_rate": 2.850740057375716e-06, |
| "loss": 0.2268, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.053763440860215, |
| "grad_norm": 1.1650862570167908, |
| "learning_rate": 2.7944050586454215e-06, |
| "loss": 0.2683, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.064516129032258, |
| "grad_norm": 1.0439734734555002, |
| "learning_rate": 2.7384155792776724e-06, |
| "loss": 0.2025, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.075268817204301, |
| "grad_norm": 1.0872608821496301, |
| "learning_rate": 2.682780390352262e-06, |
| "loss": 0.2393, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.086021505376344, |
| "grad_norm": 1.1996863353483587, |
| "learning_rate": 2.627508207447308e-06, |
| "loss": 0.2294, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.096774193548387, |
| "grad_norm": 1.2259184666219345, |
| "learning_rate": 2.5726076892739127e-06, |
| "loss": 0.2282, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.10752688172043, |
| "grad_norm": 1.169799186561526, |
| "learning_rate": 2.5180874363197217e-06, |
| "loss": 0.4074, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.118279569892473, |
| "grad_norm": 1.2450757774784624, |
| "learning_rate": 2.463955989501607e-06, |
| "loss": 0.2049, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.129032258064516, |
| "grad_norm": 1.2928234399217249, |
| "learning_rate": 2.41022182882768e-06, |
| "loss": 0.2867, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.139784946236559, |
| "grad_norm": 1.4079638381400967, |
| "learning_rate": 2.356893372068855e-06, |
| "loss": 0.2632, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.150537634408602, |
| "grad_norm": 1.2856070305392429, |
| "learning_rate": 2.3039789734401524e-06, |
| "loss": 0.2136, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.161290322580645, |
| "grad_norm": 1.257947749337111, |
| "learning_rate": 2.251486922291957e-06, |
| "loss": 0.2032, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.172043010752688, |
| "grad_norm": 1.2185848133318558, |
| "learning_rate": 2.1994254418114524e-06, |
| "loss": 0.2516, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.182795698924731, |
| "grad_norm": 1.1801170107899257, |
| "learning_rate": 2.147802687734409e-06, |
| "loss": 0.1965, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.193548387096774, |
| "grad_norm": 1.0247500981424174, |
| "learning_rate": 2.0966267470675273e-06, |
| "loss": 0.198, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.204301075268817, |
| "grad_norm": 1.16238195309318, |
| "learning_rate": 2.0459056368215786e-06, |
| "loss": 0.3509, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.21505376344086, |
| "grad_norm": 1.034172470881347, |
| "learning_rate": 1.9956473027554846e-06, |
| "loss": 0.2416, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.225806451612903, |
| "grad_norm": 1.2244901538313122, |
| "learning_rate": 1.9458596181315643e-06, |
| "loss": 0.2525, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.236559139784946, |
| "grad_norm": 1.0823123036421982, |
| "learning_rate": 1.8965503824821496e-06, |
| "loss": 0.2103, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.247311827956989, |
| "grad_norm": 1.1187165499095806, |
| "learning_rate": 1.84772732038774e-06, |
| "loss": 0.2403, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.258064516129032, |
| "grad_norm": 1.0442209176999298, |
| "learning_rate": 1.7993980802668947e-06, |
| "loss": 0.2059, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.268817204301075, |
| "grad_norm": 1.039636339404873, |
| "learning_rate": 1.7515702331780753e-06, |
| "loss": 0.2106, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.279569892473118, |
| "grad_norm": 0.971952637708957, |
| "learning_rate": 1.7042512716335873e-06, |
| "loss": 0.1833, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.2903225806451615, |
| "grad_norm": 1.0335059735288603, |
| "learning_rate": 1.6574486084258369e-06, |
| "loss": 0.2071, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.3010752688172045, |
| "grad_norm": 1.1712890998348404, |
| "learning_rate": 1.6111695754660667e-06, |
| "loss": 0.2277, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.3118279569892475, |
| "grad_norm": 1.0093534018268129, |
| "learning_rate": 1.5654214226357822e-06, |
| "loss": 0.2318, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.3225806451612905, |
| "grad_norm": 1.1389615413860168, |
| "learning_rate": 1.5202113166510058e-06, |
| "loss": 0.1953, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.3333333333333335, |
| "grad_norm": 1.246098734753724, |
| "learning_rate": 1.475546339939568e-06, |
| "loss": 0.2623, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.3440860215053765, |
| "grad_norm": 1.1558696943090916, |
| "learning_rate": 1.4314334895316095e-06, |
| "loss": 0.2445, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.3548387096774195, |
| "grad_norm": 1.2104135716011495, |
| "learning_rate": 1.3878796759634544e-06, |
| "loss": 0.2306, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.3655913978494625, |
| "grad_norm": 1.2234866527768256, |
| "learning_rate": 1.3448917221950264e-06, |
| "loss": 0.2374, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.3763440860215055, |
| "grad_norm": 1.0431434616717088, |
| "learning_rate": 1.3024763625410025e-06, |
| "loss": 0.2015, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.3870967741935485, |
| "grad_norm": 1.0687881028758524, |
| "learning_rate": 1.2606402416158391e-06, |
| "loss": 0.2067, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.3978494623655915, |
| "grad_norm": 1.134243323797628, |
| "learning_rate": 1.2193899132928539e-06, |
| "loss": 0.2183, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.4086021505376345, |
| "grad_norm": 1.0372879190394384, |
| "learning_rate": 1.1787318396775188e-06, |
| "loss": 0.1999, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.4193548387096775, |
| "grad_norm": 1.1665054400030295, |
| "learning_rate": 1.138672390095143e-06, |
| "loss": 0.2359, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.4301075268817205, |
| "grad_norm": 1.0236202702671575, |
| "learning_rate": 1.0992178400930753e-06, |
| "loss": 0.2212, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.4408602150537635, |
| "grad_norm": 1.0558233259176077, |
| "learning_rate": 1.0603743704575992e-06, |
| "loss": 0.2345, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.4516129032258065, |
| "grad_norm": 1.0155424223780078, |
| "learning_rate": 1.0221480662456845e-06, |
| "loss": 0.2339, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.4623655913978495, |
| "grad_norm": 1.1436634892190285, |
| "learning_rate": 9.845449158317216e-07, |
| "loss": 0.2091, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.4731182795698925, |
| "grad_norm": 1.0556139362512549, |
| "learning_rate": 9.475708099694125e-07, |
| "loss": 0.2178, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.4838709677419355, |
| "grad_norm": 1.0522117165285618, |
| "learning_rate": 9.112315408689415e-07, |
| "loss": 0.4077, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.4946236559139785, |
| "grad_norm": 1.181359684461707, |
| "learning_rate": 8.755328012896002e-07, |
| "loss": 0.2357, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.5053763440860215, |
| "grad_norm": 1.0897935630524107, |
| "learning_rate": 8.404801836479809e-07, |
| "loss": 0.2112, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.5161290322580645, |
| "grad_norm": 1.2518780229754654, |
| "learning_rate": 8.060791791418887e-07, |
| "loss": 0.2562, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.5268817204301075, |
| "grad_norm": 1.1188351065056708, |
| "learning_rate": 7.723351768901172e-07, |
| "loss": 0.2323, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.5376344086021505, |
| "grad_norm": 1.065402387910212, |
| "learning_rate": 7.392534630882092e-07, |
| "loss": 0.2481, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.5483870967741935, |
| "grad_norm": 1.1032094587545522, |
| "learning_rate": 7.06839220180342e-07, |
| "loss": 0.2124, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.5591397849462365, |
| "grad_norm": 1.0607815817961583, |
| "learning_rate": 6.750975260474718e-07, |
| "loss": 0.2528, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.5698924731182795, |
| "grad_norm": 1.1102586160747114, |
| "learning_rate": 6.440333532118503e-07, |
| "loss": 0.218, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.5806451612903225, |
| "grad_norm": 1.067767516141593, |
| "learning_rate": 6.136515680580479e-07, |
| "loss": 0.2233, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.5913978494623655, |
| "grad_norm": 0.998507674449185, |
| "learning_rate": 5.839569300706127e-07, |
| "loss": 0.2116, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.6021505376344085, |
| "grad_norm": 1.0336044812206406, |
| "learning_rate": 5.549540910884649e-07, |
| "loss": 0.1977, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.6129032258064515, |
| "grad_norm": 1.0581929188450587, |
| "learning_rate": 5.266475945761562e-07, |
| "loss": 0.2062, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.6236559139784945, |
| "grad_norm": 1.1943327940895927, |
| "learning_rate": 4.990418749121179e-07, |
| "loss": 0.2217, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.6344086021505375, |
| "grad_norm": 1.0573079691578624, |
| "learning_rate": 4.721412566939804e-07, |
| "loss": 0.2265, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.6451612903225805, |
| "grad_norm": 1.010938446897227, |
| "learning_rate": 4.4594995406110785e-07, |
| "loss": 0.2379, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.6559139784946235, |
| "grad_norm": 1.1607031036965616, |
| "learning_rate": 4.2047207003442003e-07, |
| "loss": 0.2079, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 1.0632285683620692, |
| "learning_rate": 3.957115958736374e-07, |
| "loss": 0.2321, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.6774193548387095, |
| "grad_norm": 0.9910529748392375, |
| "learning_rate": 3.7167241045202474e-07, |
| "loss": 0.1987, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.688172043010753, |
| "grad_norm": 1.0775742035983213, |
| "learning_rate": 3.483582796487395e-07, |
| "loss": 0.2094, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.698924731182796, |
| "grad_norm": 0.9984569332006167, |
| "learning_rate": 3.257728557588902e-07, |
| "loss": 0.25, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.709677419354839, |
| "grad_norm": 1.206448802708809, |
| "learning_rate": 3.039196769213787e-07, |
| "loss": 0.2763, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.720430107526882, |
| "grad_norm": 1.1154480238255622, |
| "learning_rate": 2.828021665646341e-07, |
| "loss": 0.221, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.731182795698925, |
| "grad_norm": 1.1493470631886933, |
| "learning_rate": 2.6242363287030617e-07, |
| "loss": 0.3025, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.741935483870968, |
| "grad_norm": 1.0349657874746057, |
| "learning_rate": 2.4278726825502696e-07, |
| "loss": 0.2539, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.752688172043011, |
| "grad_norm": 1.109346692196228, |
| "learning_rate": 2.2389614887029564e-07, |
| "loss": 0.2281, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.763440860215054, |
| "grad_norm": 1.0353141380673936, |
| "learning_rate": 2.0575323412058036e-07, |
| "loss": 0.2141, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.774193548387097, |
| "grad_norm": 1.0819606199339722, |
| "learning_rate": 1.8836136619971468e-07, |
| "loss": 0.2195, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.78494623655914, |
| "grad_norm": 1.177939271195523, |
| "learning_rate": 1.7172326964564777e-07, |
| "loss": 0.2508, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.795698924731183, |
| "grad_norm": 1.1223938711713661, |
| "learning_rate": 1.5584155091362907e-07, |
| "loss": 0.2264, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.806451612903226, |
| "grad_norm": 0.9908185773525874, |
| "learning_rate": 1.4071869796789427e-07, |
| "loss": 0.1976, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.817204301075269, |
| "grad_norm": 1.002478958639444, |
| "learning_rate": 1.263570798919106e-07, |
| "loss": 0.1927, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.827956989247312, |
| "grad_norm": 1.0056557952222767, |
| "learning_rate": 1.1275894651724517e-07, |
| "loss": 0.1868, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.838709677419355, |
| "grad_norm": 1.0197297289564395, |
| "learning_rate": 9.992642807111486e-08, |
| "loss": 0.1927, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.849462365591398, |
| "grad_norm": 1.0517533923654494, |
| "learning_rate": 8.78615348426759e-08, |
| "loss": 0.2234, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.860215053763441, |
| "grad_norm": 0.9940345005525202, |
| "learning_rate": 7.656615686809976e-08, |
| "loss": 0.1817, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.870967741935484, |
| "grad_norm": 1.0497686699125726, |
| "learning_rate": 6.604206363448662e-08, |
| "loss": 0.2152, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.881720430107527, |
| "grad_norm": 1.0384511431694428, |
| "learning_rate": 5.6290903802665444e-08, |
| "loss": 0.2158, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.89247311827957, |
| "grad_norm": 1.3090254246994417, |
| "learning_rate": 4.7314204948923356e-08, |
| "loss": 0.2409, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.903225806451613, |
| "grad_norm": 1.0831200870108442, |
| "learning_rate": 3.911337332569876e-08, |
| "loss": 0.215, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.913978494623656, |
| "grad_norm": 1.0200774345835755, |
| "learning_rate": 3.168969364128527e-08, |
| "loss": 0.1853, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.924731182795699, |
| "grad_norm": 1.0411062374812041, |
| "learning_rate": 2.5044328858576105e-08, |
| "loss": 0.1822, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.935483870967742, |
| "grad_norm": 1.0955663825580222, |
| "learning_rate": 1.917832001287645e-08, |
| "loss": 0.2366, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.946236559139785, |
| "grad_norm": 1.0058951894911308, |
| "learning_rate": 1.4092586048820578e-08, |
| "loss": 0.2088, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.956989247311828, |
| "grad_norm": 1.0534367805397553, |
| "learning_rate": 9.787923676414235e-09, |
| "loss": 0.2006, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.967741935483871, |
| "grad_norm": 1.0711677289438932, |
| "learning_rate": 6.265007246223365e-09, |
| "loss": 0.2288, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.978494623655914, |
| "grad_norm": 1.1018665243256054, |
| "learning_rate": 3.524388643736387e-09, |
| "loss": 0.2359, |
| "step": 277 |
| }, |
| { |
| "epoch": 2.989247311827957, |
| "grad_norm": 0.9977813212471681, |
| "learning_rate": 1.566497202904471e-09, |
| "loss": 0.2032, |
| "step": 278 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 1.121191187403273, |
| "learning_rate": 3.916396388869981e-10, |
| "loss": 0.2414, |
| "step": 279 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 279, |
| "total_flos": 2.2819812112387277e+17, |
| "train_loss": 0.38972266467027766, |
| "train_runtime": 34674.5164, |
| "train_samples_per_second": 0.386, |
| "train_steps_per_second": 0.008 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 279, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 200, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.2819812112387277e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |