{ "best_metric": null, "best_model_checkpoint": null, "epoch": 1.0, "eval_steps": 500, "global_step": 335, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0, "grad_norm": 9.992014557607169, "learning_rate": 9.090909090909091e-07, "loss": 2.7651, "step": 1 }, { "epoch": 0.01, "grad_norm": 9.855357081400161, "learning_rate": 1.8181818181818183e-06, "loss": 2.7106, "step": 2 }, { "epoch": 0.01, "grad_norm": 10.452032696456367, "learning_rate": 2.7272727272727272e-06, "loss": 2.7538, "step": 3 }, { "epoch": 0.01, "grad_norm": 9.376280328426684, "learning_rate": 3.6363636363636366e-06, "loss": 2.5369, "step": 4 }, { "epoch": 0.01, "grad_norm": 8.188703375753345, "learning_rate": 4.5454545454545455e-06, "loss": 2.4723, "step": 5 }, { "epoch": 0.02, "grad_norm": 7.173561512899845, "learning_rate": 5.4545454545454545e-06, "loss": 2.4736, "step": 6 }, { "epoch": 0.02, "grad_norm": 7.1450790437206235, "learning_rate": 6.363636363636364e-06, "loss": 2.4403, "step": 7 }, { "epoch": 0.02, "grad_norm": 4.247742053090214, "learning_rate": 7.272727272727273e-06, "loss": 2.0348, "step": 8 }, { "epoch": 0.03, "grad_norm": 4.481406635010285, "learning_rate": 8.181818181818183e-06, "loss": 1.9127, "step": 9 }, { "epoch": 0.03, "grad_norm": 5.521133417095248, "learning_rate": 9.090909090909091e-06, "loss": 1.891, "step": 10 }, { "epoch": 0.03, "grad_norm": 8.731593362969328, "learning_rate": 1e-05, "loss": 1.8907, "step": 11 }, { "epoch": 0.04, "grad_norm": 6.005591131903454, "learning_rate": 9.99976495753613e-06, "loss": 1.7979, "step": 12 }, { "epoch": 0.04, "grad_norm": 3.0731579551147816, "learning_rate": 9.999059852242508e-06, "loss": 1.6642, "step": 13 }, { "epoch": 0.04, "grad_norm": 2.7779072164471383, "learning_rate": 9.997884750411004e-06, "loss": 1.4797, "step": 14 }, { "epoch": 0.04, "grad_norm": 3.3917050377427156, "learning_rate": 9.996239762521152e-06, "loss": 1.6384, "step": 15 }, { "epoch": 0.05, "grad_norm": 3.348652641302203, "learning_rate": 9.994125043229753e-06, "loss": 1.4289, "step": 16 }, { "epoch": 0.05, "grad_norm": 2.7580016074034144, "learning_rate": 9.991540791356342e-06, "loss": 1.3537, "step": 17 }, { "epoch": 0.05, "grad_norm": 2.600060931738886, "learning_rate": 9.98848724986449e-06, "loss": 1.4414, "step": 18 }, { "epoch": 0.06, "grad_norm": 2.402583908854572, "learning_rate": 9.98496470583896e-06, "loss": 1.4358, "step": 19 }, { "epoch": 0.06, "grad_norm": 2.3681534411005365, "learning_rate": 9.980973490458728e-06, "loss": 1.3664, "step": 20 }, { "epoch": 0.06, "grad_norm": 2.417118446828482, "learning_rate": 9.976513978965829e-06, "loss": 1.2636, "step": 21 }, { "epoch": 0.07, "grad_norm": 2.406034881091615, "learning_rate": 9.971586590630094e-06, "loss": 1.2646, "step": 22 }, { "epoch": 0.07, "grad_norm": 2.2282767138519564, "learning_rate": 9.966191788709716e-06, "loss": 1.2749, "step": 23 }, { "epoch": 0.07, "grad_norm": 2.0495586711063853, "learning_rate": 9.960330080407712e-06, "loss": 1.2495, "step": 24 }, { "epoch": 0.07, "grad_norm": 2.232570025850529, "learning_rate": 9.954002016824226e-06, "loss": 1.255, "step": 25 }, { "epoch": 0.08, "grad_norm": 1.9746334176340221, "learning_rate": 9.947208192904722e-06, "loss": 1.2702, "step": 26 }, { "epoch": 0.08, "grad_norm": 1.90630424922607, "learning_rate": 9.939949247384046e-06, "loss": 1.1942, "step": 27 }, { "epoch": 0.08, "grad_norm": 2.0737836420918563, "learning_rate": 9.93222586272637e-06, "loss": 1.259, "step": 28 }, { "epoch": 0.09, 
"grad_norm": 2.0980819753190514, "learning_rate": 9.924038765061042e-06, "loss": 1.2254, "step": 29 }, { "epoch": 0.09, "grad_norm": 2.07955602263235, "learning_rate": 9.915388724114301e-06, "loss": 1.1271, "step": 30 }, { "epoch": 0.09, "grad_norm": 2.1659071183953023, "learning_rate": 9.906276553136924e-06, "loss": 1.2434, "step": 31 }, { "epoch": 0.1, "grad_norm": 2.1275763181049014, "learning_rate": 9.896703108827758e-06, "loss": 1.1295, "step": 32 }, { "epoch": 0.1, "grad_norm": 2.124992777307241, "learning_rate": 9.886669291253178e-06, "loss": 1.0909, "step": 33 }, { "epoch": 0.1, "grad_norm": 2.1457535781359405, "learning_rate": 9.876176043762467e-06, "loss": 1.0579, "step": 34 }, { "epoch": 0.1, "grad_norm": 2.299797197398565, "learning_rate": 9.86522435289912e-06, "loss": 1.1403, "step": 35 }, { "epoch": 0.11, "grad_norm": 1.9841854125040834, "learning_rate": 9.853815248308101e-06, "loss": 1.1669, "step": 36 }, { "epoch": 0.11, "grad_norm": 1.9449153939112522, "learning_rate": 9.841949802639031e-06, "loss": 1.177, "step": 37 }, { "epoch": 0.11, "grad_norm": 1.823305444585483, "learning_rate": 9.829629131445342e-06, "loss": 1.0274, "step": 38 }, { "epoch": 0.12, "grad_norm": 1.9967356357088837, "learning_rate": 9.816854393079402e-06, "loss": 1.052, "step": 39 }, { "epoch": 0.12, "grad_norm": 2.0742167672185525, "learning_rate": 9.803626788583603e-06, "loss": 1.0244, "step": 40 }, { "epoch": 0.12, "grad_norm": 2.030292578786332, "learning_rate": 9.789947561577445e-06, "loss": 1.0812, "step": 41 }, { "epoch": 0.13, "grad_norm": 2.1285455940700455, "learning_rate": 9.775817998140615e-06, "loss": 1.0372, "step": 42 }, { "epoch": 0.13, "grad_norm": 2.104132559156802, "learning_rate": 9.761239426692077e-06, "loss": 1.0363, "step": 43 }, { "epoch": 0.13, "grad_norm": 2.1801107672452096, "learning_rate": 9.74621321786517e-06, "loss": 1.1396, "step": 44 }, { "epoch": 0.13, "grad_norm": 1.930455012718363, "learning_rate": 9.730740784378755e-06, "loss": 1.0207, "step": 45 }, { "epoch": 0.14, "grad_norm": 1.9114532903731203, "learning_rate": 9.71482358090438e-06, "loss": 1.0088, "step": 46 }, { "epoch": 0.14, "grad_norm": 1.9603910862722937, "learning_rate": 9.698463103929542e-06, "loss": 1.0367, "step": 47 }, { "epoch": 0.14, "grad_norm": 2.0049474141842483, "learning_rate": 9.681660891616967e-06, "loss": 1.04, "step": 48 }, { "epoch": 0.15, "grad_norm": 1.9843717409843054, "learning_rate": 9.664418523660004e-06, "loss": 1.0541, "step": 49 }, { "epoch": 0.15, "grad_norm": 1.8735616571233806, "learning_rate": 9.646737621134112e-06, "loss": 1.0682, "step": 50 }, { "epoch": 0.15, "grad_norm": 2.0255894944970154, "learning_rate": 9.628619846344453e-06, "loss": 1.072, "step": 51 }, { "epoch": 0.16, "grad_norm": 1.8854561919254644, "learning_rate": 9.610066902669593e-06, "loss": 1.0295, "step": 52 }, { "epoch": 0.16, "grad_norm": 1.834056540014944, "learning_rate": 9.591080534401371e-06, "loss": 1.0185, "step": 53 }, { "epoch": 0.16, "grad_norm": 1.9190598160066954, "learning_rate": 9.571662526580898e-06, "loss": 1.0398, "step": 54 }, { "epoch": 0.16, "grad_norm": 1.927342754807542, "learning_rate": 9.551814704830734e-06, "loss": 1.0799, "step": 55 }, { "epoch": 0.17, "grad_norm": 1.9976883134890686, "learning_rate": 9.531538935183252e-06, "loss": 0.9839, "step": 56 }, { "epoch": 0.17, "grad_norm": 2.010561519470896, "learning_rate": 9.51083712390519e-06, "loss": 1.0452, "step": 57 }, { "epoch": 0.17, "grad_norm": 1.8025815822535263, "learning_rate": 9.48971121731844e-06, "loss": 0.9052, "step": 58 
}, { "epoch": 0.18, "grad_norm": 2.1672411135729948, "learning_rate": 9.468163201617063e-06, "loss": 0.9764, "step": 59 }, { "epoch": 0.18, "grad_norm": 2.1274344440002197, "learning_rate": 9.446195102680531e-06, "loss": 1.0161, "step": 60 }, { "epoch": 0.18, "grad_norm": 1.9154643005374183, "learning_rate": 9.423808985883289e-06, "loss": 0.9014, "step": 61 }, { "epoch": 0.19, "grad_norm": 1.9253885360090393, "learning_rate": 9.401006955900555e-06, "loss": 0.9921, "step": 62 }, { "epoch": 0.19, "grad_norm": 1.8131597896526979, "learning_rate": 9.377791156510456e-06, "loss": 0.9996, "step": 63 }, { "epoch": 0.19, "grad_norm": 2.047402757936845, "learning_rate": 9.35416377039246e-06, "loss": 1.0432, "step": 64 }, { "epoch": 0.19, "grad_norm": 2.42467076534372, "learning_rate": 9.330127018922195e-06, "loss": 1.0092, "step": 65 }, { "epoch": 0.2, "grad_norm": 2.0406186242464015, "learning_rate": 9.305683161962569e-06, "loss": 1.0197, "step": 66 }, { "epoch": 0.2, "grad_norm": 1.996085329280642, "learning_rate": 9.280834497651334e-06, "loss": 0.999, "step": 67 }, { "epoch": 0.2, "grad_norm": 2.050405093618206, "learning_rate": 9.255583362184998e-06, "loss": 1.0275, "step": 68 }, { "epoch": 0.21, "grad_norm": 1.9391779248159844, "learning_rate": 9.229932129599206e-06, "loss": 1.0069, "step": 69 }, { "epoch": 0.21, "grad_norm": 1.8688340050178358, "learning_rate": 9.203883211545517e-06, "loss": 1.024, "step": 70 }, { "epoch": 0.21, "grad_norm": 1.9107718382648906, "learning_rate": 9.177439057064684e-06, "loss": 0.9796, "step": 71 }, { "epoch": 0.21, "grad_norm": 1.8884098537907934, "learning_rate": 9.150602152356394e-06, "loss": 1.0094, "step": 72 }, { "epoch": 0.22, "grad_norm": 2.0666939259341337, "learning_rate": 9.123375020545534e-06, "loss": 1.1245, "step": 73 }, { "epoch": 0.22, "grad_norm": 1.9312576824640348, "learning_rate": 9.09576022144496e-06, "loss": 0.9542, "step": 74 }, { "epoch": 0.22, "grad_norm": 2.150486624339303, "learning_rate": 9.067760351314838e-06, "loss": 0.9796, "step": 75 }, { "epoch": 0.23, "grad_norm": 2.1900483954756464, "learning_rate": 9.039378042618556e-06, "loss": 1.1277, "step": 76 }, { "epoch": 0.23, "grad_norm": 1.8759460923333156, "learning_rate": 9.01061596377522e-06, "loss": 0.9205, "step": 77 }, { "epoch": 0.23, "grad_norm": 2.0684517325582923, "learning_rate": 8.981476818908778e-06, "loss": 1.0655, "step": 78 }, { "epoch": 0.24, "grad_norm": 1.7015404285579259, "learning_rate": 8.951963347593797e-06, "loss": 0.9322, "step": 79 }, { "epoch": 0.24, "grad_norm": 2.0128565561264113, "learning_rate": 8.92207832459788e-06, "loss": 1.0096, "step": 80 }, { "epoch": 0.24, "grad_norm": 2.2955683670099836, "learning_rate": 8.891824559620801e-06, "loss": 0.93, "step": 81 }, { "epoch": 0.24, "grad_norm": 1.9819221656020602, "learning_rate": 8.861204897030346e-06, "loss": 0.8111, "step": 82 }, { "epoch": 0.25, "grad_norm": 1.8840968076917093, "learning_rate": 8.83022221559489e-06, "loss": 0.8334, "step": 83 }, { "epoch": 0.25, "grad_norm": 1.8148160639213984, "learning_rate": 8.798879428212748e-06, "loss": 0.9797, "step": 84 }, { "epoch": 0.25, "grad_norm": 1.7219644312125897, "learning_rate": 8.767179481638303e-06, "loss": 0.9265, "step": 85 }, { "epoch": 0.26, "grad_norm": 1.8153657619801007, "learning_rate": 8.735125356204982e-06, "loss": 0.9308, "step": 86 }, { "epoch": 0.26, "grad_norm": 2.1864487165433717, "learning_rate": 8.702720065545024e-06, "loss": 0.8714, "step": 87 }, { "epoch": 0.26, "grad_norm": 1.9319176084767922, "learning_rate": 8.669966656306176e-06, 
"loss": 0.8996, "step": 88 }, { "epoch": 0.27, "grad_norm": 1.8941836323323051, "learning_rate": 8.636868207865244e-06, "loss": 0.9559, "step": 89 }, { "epoch": 0.27, "grad_norm": 2.0942714525772264, "learning_rate": 8.603427832038574e-06, "loss": 0.9642, "step": 90 }, { "epoch": 0.27, "grad_norm": 1.956607567158922, "learning_rate": 8.569648672789496e-06, "loss": 0.9556, "step": 91 }, { "epoch": 0.27, "grad_norm": 2.0359194563727008, "learning_rate": 8.535533905932739e-06, "loss": 0.9433, "step": 92 }, { "epoch": 0.28, "grad_norm": 1.8831570100390131, "learning_rate": 8.501086738835843e-06, "loss": 0.9653, "step": 93 }, { "epoch": 0.28, "grad_norm": 1.970446143582905, "learning_rate": 8.466310410117622e-06, "loss": 0.9637, "step": 94 }, { "epoch": 0.28, "grad_norm": 2.0437287034957903, "learning_rate": 8.43120818934367e-06, "loss": 0.9639, "step": 95 }, { "epoch": 0.29, "grad_norm": 1.9551401347597726, "learning_rate": 8.395783376718967e-06, "loss": 0.908, "step": 96 }, { "epoch": 0.29, "grad_norm": 1.9195968647766901, "learning_rate": 8.360039302777614e-06, "loss": 0.9048, "step": 97 }, { "epoch": 0.29, "grad_norm": 1.853992686163623, "learning_rate": 8.323979328069689e-06, "loss": 0.9351, "step": 98 }, { "epoch": 0.3, "grad_norm": 1.8308983331681774, "learning_rate": 8.28760684284532e-06, "loss": 0.9443, "step": 99 }, { "epoch": 0.3, "grad_norm": 2.1769204299308758, "learning_rate": 8.25092526673592e-06, "loss": 1.0337, "step": 100 }, { "epoch": 0.3, "grad_norm": 2.0101187852614335, "learning_rate": 8.213938048432697e-06, "loss": 0.9319, "step": 101 }, { "epoch": 0.3, "grad_norm": 2.1098348893292616, "learning_rate": 8.176648665362426e-06, "loss": 0.8138, "step": 102 }, { "epoch": 0.31, "grad_norm": 2.018298542101901, "learning_rate": 8.139060623360494e-06, "loss": 0.9322, "step": 103 }, { "epoch": 0.31, "grad_norm": 1.97268436525157, "learning_rate": 8.101177456341301e-06, "loss": 0.908, "step": 104 }, { "epoch": 0.31, "grad_norm": 1.8905564445487661, "learning_rate": 8.063002725966014e-06, "loss": 0.9059, "step": 105 }, { "epoch": 0.32, "grad_norm": 1.8040796552589426, "learning_rate": 8.024540021307709e-06, "loss": 0.8834, "step": 106 }, { "epoch": 0.32, "grad_norm": 1.8099754444058382, "learning_rate": 7.985792958513932e-06, "loss": 0.949, "step": 107 }, { "epoch": 0.32, "grad_norm": 1.859283445011557, "learning_rate": 7.946765180466725e-06, "loss": 0.8598, "step": 108 }, { "epoch": 0.33, "grad_norm": 2.017173689625784, "learning_rate": 7.907460356440133e-06, "loss": 0.98, "step": 109 }, { "epoch": 0.33, "grad_norm": 1.9067977524786353, "learning_rate": 7.86788218175523e-06, "loss": 0.9113, "step": 110 }, { "epoch": 0.33, "grad_norm": 1.8356374617177302, "learning_rate": 7.828034377432694e-06, "loss": 0.9379, "step": 111 }, { "epoch": 0.33, "grad_norm": 1.9671234496145011, "learning_rate": 7.787920689842965e-06, "loss": 0.971, "step": 112 }, { "epoch": 0.34, "grad_norm": 1.9057301922594099, "learning_rate": 7.747544890354031e-06, "loss": 0.9351, "step": 113 }, { "epoch": 0.34, "grad_norm": 1.8176638279870596, "learning_rate": 7.706910774976849e-06, "loss": 0.8129, "step": 114 }, { "epoch": 0.34, "grad_norm": 1.8480732550035386, "learning_rate": 7.666022164008458e-06, "loss": 0.9127, "step": 115 }, { "epoch": 0.35, "grad_norm": 1.8342675263668593, "learning_rate": 7.624882901672801e-06, "loss": 0.8875, "step": 116 }, { "epoch": 0.35, "grad_norm": 1.8479911871602939, "learning_rate": 7.5834968557593155e-06, "loss": 0.9814, "step": 117 }, { "epoch": 0.35, "grad_norm": 
1.953930833517739, "learning_rate": 7.541867917259278e-06, "loss": 1.0459, "step": 118 }, { "epoch": 0.36, "grad_norm": 1.816248946402888, "learning_rate": 7.500000000000001e-06, "loss": 0.8606, "step": 119 }, { "epoch": 0.36, "grad_norm": 1.829758704776544, "learning_rate": 7.457897040276853e-06, "loss": 0.9917, "step": 120 }, { "epoch": 0.36, "grad_norm": 1.7798304171360442, "learning_rate": 7.415562996483193e-06, "loss": 0.9226, "step": 121 }, { "epoch": 0.36, "grad_norm": 1.949746360544544, "learning_rate": 7.373001848738203e-06, "loss": 1.0089, "step": 122 }, { "epoch": 0.37, "grad_norm": 1.7709880415336614, "learning_rate": 7.330217598512696e-06, "loss": 0.8108, "step": 123 }, { "epoch": 0.37, "grad_norm": 1.8079747568178568, "learning_rate": 7.2872142682529045e-06, "loss": 0.8246, "step": 124 }, { "epoch": 0.37, "grad_norm": 1.8171645027732521, "learning_rate": 7.243995901002312e-06, "loss": 0.9032, "step": 125 }, { "epoch": 0.38, "grad_norm": 1.875345818100136, "learning_rate": 7.200566560021525e-06, "loss": 0.8692, "step": 126 }, { "epoch": 0.38, "grad_norm": 1.9933228492567425, "learning_rate": 7.156930328406268e-06, "loss": 0.93, "step": 127 }, { "epoch": 0.38, "grad_norm": 1.9411283328658857, "learning_rate": 7.113091308703498e-06, "loss": 0.9071, "step": 128 }, { "epoch": 0.39, "grad_norm": 2.090341113079029, "learning_rate": 7.069053622525697e-06, "loss": 1.043, "step": 129 }, { "epoch": 0.39, "grad_norm": 1.91058658362823, "learning_rate": 7.0248214101633685e-06, "loss": 0.8674, "step": 130 }, { "epoch": 0.39, "grad_norm": 1.9696219193781328, "learning_rate": 6.980398830195785e-06, "loss": 1.0577, "step": 131 }, { "epoch": 0.39, "grad_norm": 1.9555717096721157, "learning_rate": 6.9357900591000034e-06, "loss": 0.9531, "step": 132 }, { "epoch": 0.4, "grad_norm": 1.8951036355458846, "learning_rate": 6.890999290858213e-06, "loss": 1.006, "step": 133 }, { "epoch": 0.4, "grad_norm": 1.8568126429433485, "learning_rate": 6.8460307365634225e-06, "loss": 0.8623, "step": 134 }, { "epoch": 0.4, "grad_norm": 1.8704210955739529, "learning_rate": 6.800888624023552e-06, "loss": 0.9493, "step": 135 }, { "epoch": 0.41, "grad_norm": 1.832260849480108, "learning_rate": 6.755577197363945e-06, "loss": 0.8205, "step": 136 }, { "epoch": 0.41, "grad_norm": 2.031520077283429, "learning_rate": 6.710100716628345e-06, "loss": 0.8792, "step": 137 }, { "epoch": 0.41, "grad_norm": 1.7410235600999984, "learning_rate": 6.6644634573783825e-06, "loss": 0.8951, "step": 138 }, { "epoch": 0.41, "grad_norm": 1.843737618356284, "learning_rate": 6.618669710291607e-06, "loss": 0.8085, "step": 139 }, { "epoch": 0.42, "grad_norm": 1.765762644440963, "learning_rate": 6.572723780758069e-06, "loss": 0.8957, "step": 140 }, { "epoch": 0.42, "grad_norm": 1.9511281601416317, "learning_rate": 6.526629988475567e-06, "loss": 0.9, "step": 141 }, { "epoch": 0.42, "grad_norm": 1.7773818546444415, "learning_rate": 6.4803926670435e-06, "loss": 0.9234, "step": 142 }, { "epoch": 0.43, "grad_norm": 1.8692189103556698, "learning_rate": 6.434016163555452e-06, "loss": 0.8899, "step": 143 }, { "epoch": 0.43, "grad_norm": 1.8255302240308842, "learning_rate": 6.387504838190479e-06, "loss": 0.8086, "step": 144 }, { "epoch": 0.43, "grad_norm": 1.8578841539686264, "learning_rate": 6.340863063803187e-06, "loss": 0.8248, "step": 145 }, { "epoch": 0.44, "grad_norm": 1.7994136623563066, "learning_rate": 6.294095225512604e-06, "loss": 0.933, "step": 146 }, { "epoch": 0.44, "grad_norm": 2.0121355236133978, "learning_rate": 6.247205720289907e-06, 
"loss": 1.0521, "step": 147 }, { "epoch": 0.44, "grad_norm": 1.869460952399974, "learning_rate": 6.2001989565450305e-06, "loss": 0.8515, "step": 148 }, { "epoch": 0.44, "grad_norm": 1.8246450967462686, "learning_rate": 6.153079353712201e-06, "loss": 0.852, "step": 149 }, { "epoch": 0.45, "grad_norm": 1.9648734937960934, "learning_rate": 6.105851341834439e-06, "loss": 0.8434, "step": 150 }, { "epoch": 0.45, "grad_norm": 1.7658190156459108, "learning_rate": 6.058519361147055e-06, "loss": 0.8406, "step": 151 }, { "epoch": 0.45, "grad_norm": 1.8896576006369, "learning_rate": 6.011087861660191e-06, "loss": 0.8235, "step": 152 }, { "epoch": 0.46, "grad_norm": 1.701511265987839, "learning_rate": 5.9635613027404495e-06, "loss": 0.83, "step": 153 }, { "epoch": 0.46, "grad_norm": 1.8458019088871649, "learning_rate": 5.915944152691634e-06, "loss": 0.8061, "step": 154 }, { "epoch": 0.46, "grad_norm": 1.8758600805480732, "learning_rate": 5.8682408883346535e-06, "loss": 0.8738, "step": 155 }, { "epoch": 0.47, "grad_norm": 1.7770461619321762, "learning_rate": 5.820455994586621e-06, "loss": 0.8144, "step": 156 }, { "epoch": 0.47, "grad_norm": 2.06521139231319, "learning_rate": 5.772593964039203e-06, "loss": 1.0683, "step": 157 }, { "epoch": 0.47, "grad_norm": 1.8949940538942156, "learning_rate": 5.724659296536234e-06, "loss": 0.8378, "step": 158 }, { "epoch": 0.47, "grad_norm": 1.8329328421055082, "learning_rate": 5.6766564987506564e-06, "loss": 0.8059, "step": 159 }, { "epoch": 0.48, "grad_norm": 1.9356837835306697, "learning_rate": 5.628590083760815e-06, "loss": 0.8759, "step": 160 }, { "epoch": 0.48, "grad_norm": 1.914824151289401, "learning_rate": 5.5804645706261515e-06, "loss": 0.9575, "step": 161 }, { "epoch": 0.48, "grad_norm": 1.9610745203544777, "learning_rate": 5.532284483962341e-06, "loss": 0.8548, "step": 162 }, { "epoch": 0.49, "grad_norm": 1.9847850375923817, "learning_rate": 5.484054353515896e-06, "loss": 0.9005, "step": 163 }, { "epoch": 0.49, "grad_norm": 1.900418899811939, "learning_rate": 5.435778713738292e-06, "loss": 0.9209, "step": 164 }, { "epoch": 0.49, "grad_norm": 1.7892930994134306, "learning_rate": 5.387462103359655e-06, "loss": 0.8555, "step": 165 }, { "epoch": 0.5, "grad_norm": 1.774160649412005, "learning_rate": 5.339109064962047e-06, "loss": 0.7846, "step": 166 }, { "epoch": 0.5, "grad_norm": 1.9563920124901706, "learning_rate": 5.290724144552379e-06, "loss": 0.7538, "step": 167 }, { "epoch": 0.5, "grad_norm": 1.9120830031418432, "learning_rate": 5.242311891135016e-06, "loss": 0.9287, "step": 168 }, { "epoch": 0.5, "grad_norm": 1.7820515084191753, "learning_rate": 5.193876856284085e-06, "loss": 0.7843, "step": 169 }, { "epoch": 0.51, "grad_norm": 1.8838516311972966, "learning_rate": 5.145423593715558e-06, "loss": 0.8848, "step": 170 }, { "epoch": 0.51, "grad_norm": 1.8000718579254187, "learning_rate": 5.096956658859122e-06, "loss": 0.8422, "step": 171 }, { "epoch": 0.51, "grad_norm": 1.9252361586236277, "learning_rate": 5.048480608429893e-06, "loss": 0.788, "step": 172 }, { "epoch": 0.52, "grad_norm": 1.7838973065656873, "learning_rate": 5e-06, "loss": 0.8419, "step": 173 }, { "epoch": 0.52, "grad_norm": 1.7406962046278718, "learning_rate": 4.951519391570108e-06, "loss": 0.8617, "step": 174 }, { "epoch": 0.52, "grad_norm": 1.833052653443519, "learning_rate": 4.903043341140879e-06, "loss": 0.7839, "step": 175 }, { "epoch": 0.53, "grad_norm": 1.9635309949293125, "learning_rate": 4.854576406284443e-06, "loss": 0.745, "step": 176 }, { "epoch": 0.53, "grad_norm": 
1.8878508304922175, "learning_rate": 4.806123143715916e-06, "loss": 0.898, "step": 177 }, { "epoch": 0.53, "grad_norm": 1.8490253561704422, "learning_rate": 4.7576881088649865e-06, "loss": 0.8764, "step": 178 }, { "epoch": 0.53, "grad_norm": 1.8492342801201405, "learning_rate": 4.7092758554476215e-06, "loss": 0.8734, "step": 179 }, { "epoch": 0.54, "grad_norm": 1.678333641735353, "learning_rate": 4.660890935037954e-06, "loss": 0.8476, "step": 180 }, { "epoch": 0.54, "grad_norm": 1.9197951126550667, "learning_rate": 4.6125378966403465e-06, "loss": 0.8502, "step": 181 }, { "epoch": 0.54, "grad_norm": 1.8065516269516981, "learning_rate": 4.564221286261709e-06, "loss": 0.8434, "step": 182 }, { "epoch": 0.55, "grad_norm": 2.090063964079408, "learning_rate": 4.515945646484105e-06, "loss": 0.9075, "step": 183 }, { "epoch": 0.55, "grad_norm": 1.714803378036167, "learning_rate": 4.467715516037659e-06, "loss": 0.8979, "step": 184 }, { "epoch": 0.55, "grad_norm": 1.8920297684515237, "learning_rate": 4.4195354293738484e-06, "loss": 0.917, "step": 185 }, { "epoch": 0.56, "grad_norm": 1.9646366988613642, "learning_rate": 4.371409916239188e-06, "loss": 0.8406, "step": 186 }, { "epoch": 0.56, "grad_norm": 2.0095779435113643, "learning_rate": 4.323343501249346e-06, "loss": 0.8105, "step": 187 }, { "epoch": 0.56, "grad_norm": 1.84960716754979, "learning_rate": 4.275340703463767e-06, "loss": 0.709, "step": 188 }, { "epoch": 0.56, "grad_norm": 1.8784422747688236, "learning_rate": 4.227406035960798e-06, "loss": 0.8721, "step": 189 }, { "epoch": 0.57, "grad_norm": 1.7892389168859104, "learning_rate": 4.17954400541338e-06, "loss": 0.8237, "step": 190 }, { "epoch": 0.57, "grad_norm": 1.834621493465168, "learning_rate": 4.131759111665349e-06, "loss": 0.858, "step": 191 }, { "epoch": 0.57, "grad_norm": 2.0244248532984312, "learning_rate": 4.084055847308367e-06, "loss": 0.84, "step": 192 }, { "epoch": 0.58, "grad_norm": 1.7956990250048366, "learning_rate": 4.036438697259551e-06, "loss": 0.8343, "step": 193 }, { "epoch": 0.58, "grad_norm": 2.008026972496449, "learning_rate": 3.988912138339812e-06, "loss": 0.858, "step": 194 }, { "epoch": 0.58, "grad_norm": 2.02397192889752, "learning_rate": 3.941480638852948e-06, "loss": 0.8685, "step": 195 }, { "epoch": 0.59, "grad_norm": 1.8202366956390423, "learning_rate": 3.894148658165562e-06, "loss": 0.8428, "step": 196 }, { "epoch": 0.59, "grad_norm": 1.8728629012942652, "learning_rate": 3.8469206462878e-06, "loss": 0.833, "step": 197 }, { "epoch": 0.59, "grad_norm": 1.920110342063804, "learning_rate": 3.7998010434549716e-06, "loss": 0.923, "step": 198 }, { "epoch": 0.59, "grad_norm": 1.8750275450908436, "learning_rate": 3.752794279710094e-06, "loss": 0.8113, "step": 199 }, { "epoch": 0.6, "grad_norm": 1.7555094657763348, "learning_rate": 3.705904774487396e-06, "loss": 0.8442, "step": 200 }, { "epoch": 0.6, "grad_norm": 1.9946734158465282, "learning_rate": 3.6591369361968127e-06, "loss": 0.8566, "step": 201 }, { "epoch": 0.6, "grad_norm": 1.792346256212117, "learning_rate": 3.6124951618095224e-06, "loss": 0.8306, "step": 202 }, { "epoch": 0.61, "grad_norm": 1.7737697004497117, "learning_rate": 3.5659838364445505e-06, "loss": 0.827, "step": 203 }, { "epoch": 0.61, "grad_norm": 1.9896055506585861, "learning_rate": 3.519607332956502e-06, "loss": 0.8967, "step": 204 }, { "epoch": 0.61, "grad_norm": 2.0112406931128954, "learning_rate": 3.473370011524435e-06, "loss": 0.9582, "step": 205 }, { "epoch": 0.61, "grad_norm": 1.6933223451923685, "learning_rate": 3.427276219241933e-06, 
"loss": 0.7972, "step": 206 }, { "epoch": 0.62, "grad_norm": 1.7960032297149964, "learning_rate": 3.3813302897083955e-06, "loss": 0.91, "step": 207 }, { "epoch": 0.62, "grad_norm": 1.8409756298969775, "learning_rate": 3.335536542621617e-06, "loss": 0.8442, "step": 208 }, { "epoch": 0.62, "grad_norm": 1.8943440767619342, "learning_rate": 3.289899283371657e-06, "loss": 0.8782, "step": 209 }, { "epoch": 0.63, "grad_norm": 1.983602448512353, "learning_rate": 3.244422802636057e-06, "loss": 0.9121, "step": 210 }, { "epoch": 0.63, "grad_norm": 1.919904148023849, "learning_rate": 3.1991113759764493e-06, "loss": 0.8452, "step": 211 }, { "epoch": 0.63, "grad_norm": 1.7436938594681772, "learning_rate": 3.1539692634365788e-06, "loss": 0.8367, "step": 212 }, { "epoch": 0.64, "grad_norm": 1.9023867686458018, "learning_rate": 3.1090007091417884e-06, "loss": 0.7576, "step": 213 }, { "epoch": 0.64, "grad_norm": 1.8340656721562485, "learning_rate": 3.0642099408999982e-06, "loss": 0.9066, "step": 214 }, { "epoch": 0.64, "grad_norm": 2.123899315024522, "learning_rate": 3.019601169804216e-06, "loss": 0.8908, "step": 215 }, { "epoch": 0.64, "grad_norm": 1.970762510888818, "learning_rate": 2.975178589836632e-06, "loss": 0.8324, "step": 216 }, { "epoch": 0.65, "grad_norm": 1.7131201352172498, "learning_rate": 2.9309463774743047e-06, "loss": 0.9371, "step": 217 }, { "epoch": 0.65, "grad_norm": 1.692464765973636, "learning_rate": 2.886908691296504e-06, "loss": 0.8384, "step": 218 }, { "epoch": 0.65, "grad_norm": 1.754853534401181, "learning_rate": 2.843069671593734e-06, "loss": 0.8639, "step": 219 }, { "epoch": 0.66, "grad_norm": 1.7746221308047176, "learning_rate": 2.7994334399784773e-06, "loss": 0.8123, "step": 220 }, { "epoch": 0.66, "grad_norm": 1.7797496232199694, "learning_rate": 2.7560040989976894e-06, "loss": 0.8011, "step": 221 }, { "epoch": 0.66, "grad_norm": 1.7965856236183813, "learning_rate": 2.7127857317470967e-06, "loss": 0.8614, "step": 222 }, { "epoch": 0.67, "grad_norm": 1.895456729064966, "learning_rate": 2.6697824014873076e-06, "loss": 0.808, "step": 223 }, { "epoch": 0.67, "grad_norm": 1.6731115830560004, "learning_rate": 2.626998151261798e-06, "loss": 0.7644, "step": 224 }, { "epoch": 0.67, "grad_norm": 1.722068279724202, "learning_rate": 2.5844370035168077e-06, "loss": 0.7876, "step": 225 }, { "epoch": 0.67, "grad_norm": 1.8560082384134242, "learning_rate": 2.5421029597231476e-06, "loss": 0.8448, "step": 226 }, { "epoch": 0.68, "grad_norm": 1.8503877478285864, "learning_rate": 2.5000000000000015e-06, "loss": 0.8241, "step": 227 }, { "epoch": 0.68, "grad_norm": 1.9115184771008713, "learning_rate": 2.458132082740724e-06, "loss": 0.8465, "step": 228 }, { "epoch": 0.68, "grad_norm": 1.9363363832594316, "learning_rate": 2.4165031442406857e-06, "loss": 0.8231, "step": 229 }, { "epoch": 0.69, "grad_norm": 1.9209239281193067, "learning_rate": 2.3751170983272e-06, "loss": 0.9008, "step": 230 }, { "epoch": 0.69, "grad_norm": 1.8848510564703413, "learning_rate": 2.333977835991545e-06, "loss": 0.789, "step": 231 }, { "epoch": 0.69, "grad_norm": 1.885613538323485, "learning_rate": 2.293089225023152e-06, "loss": 0.9544, "step": 232 }, { "epoch": 0.7, "grad_norm": 1.9378339418142523, "learning_rate": 2.2524551096459703e-06, "loss": 0.8512, "step": 233 }, { "epoch": 0.7, "grad_norm": 1.8499392383494713, "learning_rate": 2.2120793101570366e-06, "loss": 0.7881, "step": 234 }, { "epoch": 0.7, "grad_norm": 1.8123212923542507, "learning_rate": 2.171965622567308e-06, "loss": 0.8658, "step": 235 }, { "epoch": 0.7, 
"grad_norm": 1.8978780385206444, "learning_rate": 2.132117818244771e-06, "loss": 0.8045, "step": 236 }, { "epoch": 0.71, "grad_norm": 1.7796628138142092, "learning_rate": 2.0925396435598665e-06, "loss": 0.813, "step": 237 }, { "epoch": 0.71, "grad_norm": 1.8442874060590642, "learning_rate": 2.053234819533276e-06, "loss": 0.7706, "step": 238 }, { "epoch": 0.71, "grad_norm": 1.8289341236387466, "learning_rate": 2.0142070414860704e-06, "loss": 0.8927, "step": 239 }, { "epoch": 0.72, "grad_norm": 1.9766538998425796, "learning_rate": 1.9754599786922913e-06, "loss": 0.848, "step": 240 }, { "epoch": 0.72, "grad_norm": 1.7214028779109611, "learning_rate": 1.936997274033986e-06, "loss": 0.7225, "step": 241 }, { "epoch": 0.72, "grad_norm": 1.5814120081508478, "learning_rate": 1.8988225436587005e-06, "loss": 0.7288, "step": 242 }, { "epoch": 0.73, "grad_norm": 1.7940729269240667, "learning_rate": 1.8609393766395083e-06, "loss": 0.9349, "step": 243 }, { "epoch": 0.73, "grad_norm": 1.878940225083856, "learning_rate": 1.823351334637576e-06, "loss": 0.8962, "step": 244 }, { "epoch": 0.73, "grad_norm": 1.8155856668279255, "learning_rate": 1.7860619515673034e-06, "loss": 0.88, "step": 245 }, { "epoch": 0.73, "grad_norm": 1.9394793705542583, "learning_rate": 1.7490747332640833e-06, "loss": 0.897, "step": 246 }, { "epoch": 0.74, "grad_norm": 1.8855178833940114, "learning_rate": 1.7123931571546826e-06, "loss": 0.8955, "step": 247 }, { "epoch": 0.74, "grad_norm": 1.680634839138001, "learning_rate": 1.6760206719303107e-06, "loss": 0.781, "step": 248 }, { "epoch": 0.74, "grad_norm": 1.8281579952033755, "learning_rate": 1.639960697222388e-06, "loss": 0.9578, "step": 249 }, { "epoch": 0.75, "grad_norm": 1.8390758231951718, "learning_rate": 1.6042166232810346e-06, "loss": 0.8377, "step": 250 }, { "epoch": 0.75, "grad_norm": 1.637452500506161, "learning_rate": 1.5687918106563326e-06, "loss": 0.7837, "step": 251 }, { "epoch": 0.75, "grad_norm": 1.8615373812049087, "learning_rate": 1.5336895898823801e-06, "loss": 0.8947, "step": 252 }, { "epoch": 0.76, "grad_norm": 1.8795666079438391, "learning_rate": 1.4989132611641576e-06, "loss": 0.8214, "step": 253 }, { "epoch": 0.76, "grad_norm": 2.1220008217273385, "learning_rate": 1.4644660940672628e-06, "loss": 0.8473, "step": 254 }, { "epoch": 0.76, "grad_norm": 2.016744480195016, "learning_rate": 1.4303513272105057e-06, "loss": 0.7648, "step": 255 }, { "epoch": 0.76, "grad_norm": 1.8273845868646243, "learning_rate": 1.396572167961427e-06, "loss": 0.9056, "step": 256 }, { "epoch": 0.77, "grad_norm": 1.9242593040018354, "learning_rate": 1.3631317921347564e-06, "loss": 0.8546, "step": 257 }, { "epoch": 0.77, "grad_norm": 1.7602720727922225, "learning_rate": 1.330033343693824e-06, "loss": 0.8168, "step": 258 }, { "epoch": 0.77, "grad_norm": 1.7981469172314437, "learning_rate": 1.297279934454978e-06, "loss": 0.8196, "step": 259 }, { "epoch": 0.78, "grad_norm": 1.934034355132605, "learning_rate": 1.264874643795021e-06, "loss": 0.8641, "step": 260 }, { "epoch": 0.78, "grad_norm": 1.731914923464836, "learning_rate": 1.2328205183616964e-06, "loss": 0.8316, "step": 261 }, { "epoch": 0.78, "grad_norm": 1.8671301669332583, "learning_rate": 1.2011205717872538e-06, "loss": 0.8973, "step": 262 }, { "epoch": 0.79, "grad_norm": 1.8258264225083662, "learning_rate": 1.1697777844051105e-06, "loss": 0.827, "step": 263 }, { "epoch": 0.79, "grad_norm": 1.76891519967969, "learning_rate": 1.1387951029696543e-06, "loss": 0.8073, "step": 264 }, { "epoch": 0.79, "grad_norm": 1.8056333135913247, 
"learning_rate": 1.1081754403792e-06, "loss": 0.8111, "step": 265 }, { "epoch": 0.79, "grad_norm": 1.6693657914010887, "learning_rate": 1.0779216754021215e-06, "loss": 0.8481, "step": 266 }, { "epoch": 0.8, "grad_norm": 1.8914182275757614, "learning_rate": 1.0480366524062041e-06, "loss": 0.7854, "step": 267 }, { "epoch": 0.8, "grad_norm": 1.7952669162789925, "learning_rate": 1.0185231810912223e-06, "loss": 0.8331, "step": 268 }, { "epoch": 0.8, "grad_norm": 1.8041970383317338, "learning_rate": 9.893840362247809e-07, "loss": 0.808, "step": 269 }, { "epoch": 0.81, "grad_norm": 1.7349503739814947, "learning_rate": 9.606219573814447e-07, "loss": 0.782, "step": 270 }, { "epoch": 0.81, "grad_norm": 1.971273696916632, "learning_rate": 9.322396486851626e-07, "loss": 0.8777, "step": 271 }, { "epoch": 0.81, "grad_norm": 1.6916809211406005, "learning_rate": 9.042397785550405e-07, "loss": 0.816, "step": 272 }, { "epoch": 0.81, "grad_norm": 1.859123501478176, "learning_rate": 8.766249794544662e-07, "loss": 0.8536, "step": 273 }, { "epoch": 0.82, "grad_norm": 1.9296428042034799, "learning_rate": 8.49397847643606e-07, "loss": 0.9167, "step": 274 }, { "epoch": 0.82, "grad_norm": 1.9014753831076923, "learning_rate": 8.225609429353187e-07, "loss": 0.8984, "step": 275 }, { "epoch": 0.82, "grad_norm": 1.9966126008374474, "learning_rate": 7.961167884544852e-07, "loss": 0.8615, "step": 276 }, { "epoch": 0.83, "grad_norm": 1.7845802462997415, "learning_rate": 7.700678704007947e-07, "loss": 0.8524, "step": 277 }, { "epoch": 0.83, "grad_norm": 1.786614071106851, "learning_rate": 7.444166378150014e-07, "loss": 0.8085, "step": 278 }, { "epoch": 0.83, "grad_norm": 1.6646768752142675, "learning_rate": 7.191655023486682e-07, "loss": 0.867, "step": 279 }, { "epoch": 0.84, "grad_norm": 1.934324323024249, "learning_rate": 6.94316838037431e-07, "loss": 0.724, "step": 280 }, { "epoch": 0.84, "grad_norm": 1.7670716798370054, "learning_rate": 6.698729810778065e-07, "loss": 0.7882, "step": 281 }, { "epoch": 0.84, "grad_norm": 1.8041863674588816, "learning_rate": 6.458362296075399e-07, "loss": 0.7681, "step": 282 }, { "epoch": 0.84, "grad_norm": 1.7612589809223944, "learning_rate": 6.222088434895462e-07, "loss": 0.8392, "step": 283 }, { "epoch": 0.85, "grad_norm": 1.749669895373114, "learning_rate": 5.989930440994451e-07, "loss": 0.7651, "step": 284 }, { "epoch": 0.85, "grad_norm": 1.856400426555766, "learning_rate": 5.76191014116711e-07, "loss": 0.854, "step": 285 }, { "epoch": 0.85, "grad_norm": 1.786307733562441, "learning_rate": 5.538048973194699e-07, "loss": 0.8124, "step": 286 }, { "epoch": 0.86, "grad_norm": 1.7598458461279902, "learning_rate": 5.318367983829393e-07, "loss": 0.822, "step": 287 }, { "epoch": 0.86, "grad_norm": 1.6759526002862657, "learning_rate": 5.102887826815589e-07, "loss": 0.7365, "step": 288 }, { "epoch": 0.86, "grad_norm": 1.7936397119836314, "learning_rate": 4.891628760948114e-07, "loss": 0.7666, "step": 289 }, { "epoch": 0.87, "grad_norm": 1.8498525973183433, "learning_rate": 4.6846106481675035e-07, "loss": 0.8682, "step": 290 }, { "epoch": 0.87, "grad_norm": 1.7420235116184377, "learning_rate": 4.481852951692672e-07, "loss": 0.7485, "step": 291 }, { "epoch": 0.87, "grad_norm": 1.877611598521712, "learning_rate": 4.283374734191037e-07, "loss": 0.9166, "step": 292 }, { "epoch": 0.87, "grad_norm": 1.9492518664893324, "learning_rate": 4.089194655986306e-07, "loss": 0.7579, "step": 293 }, { "epoch": 0.88, "grad_norm": 1.9457494252701653, "learning_rate": 3.899330973304083e-07, "loss": 0.9035, "step": 
294 }, { "epoch": 0.88, "grad_norm": 1.9791967456606612, "learning_rate": 3.7138015365554834e-07, "loss": 0.7953, "step": 295 }, { "epoch": 0.88, "grad_norm": 1.794711427524111, "learning_rate": 3.5326237886588734e-07, "loss": 0.847, "step": 296 }, { "epoch": 0.89, "grad_norm": 1.8794850272880976, "learning_rate": 3.355814763399973e-07, "loss": 0.8514, "step": 297 }, { "epoch": 0.89, "grad_norm": 1.790194856863824, "learning_rate": 3.183391083830345e-07, "loss": 0.8278, "step": 298 }, { "epoch": 0.89, "grad_norm": 1.8717181093829611, "learning_rate": 3.015368960704584e-07, "loss": 0.8429, "step": 299 }, { "epoch": 0.9, "grad_norm": 1.7463890355109786, "learning_rate": 2.8517641909562075e-07, "loss": 0.8278, "step": 300 }, { "epoch": 0.9, "grad_norm": 1.855252737220356, "learning_rate": 2.6925921562124867e-07, "loss": 0.6983, "step": 301 }, { "epoch": 0.9, "grad_norm": 1.8352359161688767, "learning_rate": 2.5378678213483057e-07, "loss": 0.8415, "step": 302 }, { "epoch": 0.9, "grad_norm": 1.8346745144152707, "learning_rate": 2.3876057330792344e-07, "loss": 0.8257, "step": 303 }, { "epoch": 0.91, "grad_norm": 1.8337279964237236, "learning_rate": 2.2418200185938488e-07, "loss": 0.8155, "step": 304 }, { "epoch": 0.91, "grad_norm": 1.9002602072773978, "learning_rate": 2.1005243842255552e-07, "loss": 0.9803, "step": 305 }, { "epoch": 0.91, "grad_norm": 1.7966402315107077, "learning_rate": 1.9637321141639743e-07, "loss": 0.7586, "step": 306 }, { "epoch": 0.92, "grad_norm": 1.8900826834954527, "learning_rate": 1.8314560692059836e-07, "loss": 0.8701, "step": 307 }, { "epoch": 0.92, "grad_norm": 1.986051932218701, "learning_rate": 1.7037086855465902e-07, "loss": 0.854, "step": 308 }, { "epoch": 0.92, "grad_norm": 1.819134244934257, "learning_rate": 1.5805019736097105e-07, "loss": 0.8146, "step": 309 }, { "epoch": 0.93, "grad_norm": 1.7730810336503418, "learning_rate": 1.4618475169190017e-07, "loss": 0.8358, "step": 310 }, { "epoch": 0.93, "grad_norm": 1.8749266769059667, "learning_rate": 1.3477564710088097e-07, "loss": 0.8806, "step": 311 }, { "epoch": 0.93, "grad_norm": 1.7637217906423568, "learning_rate": 1.2382395623753484e-07, "loss": 0.8348, "step": 312 }, { "epoch": 0.93, "grad_norm": 1.7705930846058242, "learning_rate": 1.1333070874682217e-07, "loss": 0.8438, "step": 313 }, { "epoch": 0.94, "grad_norm": 1.9290513445644262, "learning_rate": 1.0329689117224262e-07, "loss": 0.8837, "step": 314 }, { "epoch": 0.94, "grad_norm": 1.7184442508285342, "learning_rate": 9.372344686307655e-08, "loss": 0.7542, "step": 315 }, { "epoch": 0.94, "grad_norm": 1.7508169719519233, "learning_rate": 8.461127588570039e-08, "loss": 0.8068, "step": 316 }, { "epoch": 0.95, "grad_norm": 2.072932043609583, "learning_rate": 7.59612349389599e-08, "loss": 0.8847, "step": 317 }, { "epoch": 0.95, "grad_norm": 1.8418737822218936, "learning_rate": 6.777413727363069e-08, "loss": 0.7607, "step": 318 }, { "epoch": 0.95, "grad_norm": 1.8728345287191652, "learning_rate": 6.005075261595495e-08, "loss": 0.8279, "step": 319 }, { "epoch": 0.96, "grad_norm": 1.9751421080539375, "learning_rate": 5.279180709527765e-08, "loss": 0.7666, "step": 320 }, { "epoch": 0.96, "grad_norm": 1.742439764943798, "learning_rate": 4.599798317577342e-08, "loss": 0.8479, "step": 321 }, { "epoch": 0.96, "grad_norm": 1.8692495061239316, "learning_rate": 3.9669919592288385e-08, "loss": 0.8109, "step": 322 }, { "epoch": 0.96, "grad_norm": 1.670765464035315, "learning_rate": 3.3808211290284886e-08, "loss": 0.7585, "step": 323 }, { "epoch": 0.97, "grad_norm": 
2.062997844606162, "learning_rate": 2.8413409369907887e-08, "loss": 0.8709, "step": 324 }, { "epoch": 0.97, "grad_norm": 1.9091961716040289, "learning_rate": 2.3486021034170857e-08, "loss": 0.818, "step": 325 }, { "epoch": 0.97, "grad_norm": 1.8898711790354914, "learning_rate": 1.9026509541272276e-08, "loss": 0.8346, "step": 326 }, { "epoch": 0.98, "grad_norm": 1.6576693409221155, "learning_rate": 1.5035294161039882e-08, "loss": 0.7996, "step": 327 }, { "epoch": 0.98, "grad_norm": 1.7571989556959642, "learning_rate": 1.1512750135511674e-08, "loss": 0.7742, "step": 328 }, { "epoch": 0.98, "grad_norm": 1.8431201117480818, "learning_rate": 8.459208643659122e-09, "loss": 0.8858, "step": 329 }, { "epoch": 0.99, "grad_norm": 1.8220210754565818, "learning_rate": 5.874956770248186e-09, "loss": 0.8481, "step": 330 }, { "epoch": 0.99, "grad_norm": 1.8136218315284558, "learning_rate": 3.760237478849793e-09, "loss": 0.6899, "step": 331 }, { "epoch": 0.99, "grad_norm": 1.9765791609593397, "learning_rate": 2.1152495889970035e-09, "loss": 0.8208, "step": 332 }, { "epoch": 0.99, "grad_norm": 1.8275429866969195, "learning_rate": 9.401477574932927e-10, "loss": 0.8589, "step": 333 }, { "epoch": 1.0, "grad_norm": 1.8063515591696528, "learning_rate": 2.3504246386918394e-10, "loss": 0.781, "step": 334 }, { "epoch": 1.0, "grad_norm": 1.9072496888398838, "learning_rate": 0.0, "loss": 0.8156, "step": 335 }, { "epoch": 1.0, "step": 335, "total_flos": 79804470312960.0, "train_loss": 0.9642956034461064, "train_runtime": 7158.0735, "train_samples_per_second": 0.374, "train_steps_per_second": 0.047 } ], "logging_steps": 1.0, "max_steps": 335, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 2056, "total_flos": 79804470312960.0, "train_batch_size": 1, "trial_name": null, "trial_params": null }
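
The file above appears to be a Hugging Face Trainer `trainer_state.json`: `log_history` holds one record per optimizer step (here `logging_steps` is 1) with the rounded `epoch`, gradient norm, scheduled learning rate, and training loss, while the final record carries run-level aggregates such as `train_loss` and `train_runtime`. Below is a minimal sketch, not part of the original file, of how one might load this state and plot the loss and learning-rate curves; the input path and output filename are assumptions.

```python
import json

import matplotlib.pyplot as plt

# Assumed path; point this at the trainer_state.json shown above.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step records; the last entry holds run-level aggregates
# (train_loss, train_runtime, ...) and has no per-step "loss" field.
records = [r for r in state["log_history"] if "loss" in r]
steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.savefig("training_curves.png")  # assumed output filename
```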