| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 200, | |
| "global_step": 668, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0029940119760479044, | |
| "grad_norm": 2.360150470792367, | |
| "learning_rate": 9.999944704978835e-06, | |
| "loss": 0.6092, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.005988023952095809, | |
| "grad_norm": 2.4295311904157155, | |
| "learning_rate": 9.999778821138357e-06, | |
| "loss": 0.575, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.008982035928143712, | |
| "grad_norm": 1.6045552308559259, | |
| "learning_rate": 9.999502352147583e-06, | |
| "loss": 0.5736, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.011976047904191617, | |
| "grad_norm": 1.7003243086131508, | |
| "learning_rate": 9.999115304121459e-06, | |
| "loss": 0.6014, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.014970059880239521, | |
| "grad_norm": 1.499989194091945, | |
| "learning_rate": 9.998617685620715e-06, | |
| "loss": 0.4662, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.017964071856287425, | |
| "grad_norm": 1.61955016345233, | |
| "learning_rate": 9.998009507651683e-06, | |
| "loss": 0.5257, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.020958083832335328, | |
| "grad_norm": 1.465934804743867, | |
| "learning_rate": 9.997290783666048e-06, | |
| "loss": 0.481, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.023952095808383235, | |
| "grad_norm": 1.4260547177810452, | |
| "learning_rate": 9.996461529560553e-06, | |
| "loss": 0.4611, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.02694610778443114, | |
| "grad_norm": 1.594334635936141, | |
| "learning_rate": 9.995521763676645e-06, | |
| "loss": 0.5282, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.029940119760479042, | |
| "grad_norm": 1.5412598472431942, | |
| "learning_rate": 9.994471506800078e-06, | |
| "loss": 0.4832, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.03293413173652695, | |
| "grad_norm": 1.3382556026543801, | |
| "learning_rate": 9.993310782160439e-06, | |
| "loss": 0.5119, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.03592814371257485, | |
| "grad_norm": 1.3602716806719026, | |
| "learning_rate": 9.992039615430648e-06, | |
| "loss": 0.4866, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.038922155688622756, | |
| "grad_norm": 1.3257085493157688, | |
| "learning_rate": 9.99065803472638e-06, | |
| "loss": 0.456, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.041916167664670656, | |
| "grad_norm": 1.240022327929267, | |
| "learning_rate": 9.989166070605447e-06, | |
| "loss": 0.4411, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.04491017964071856, | |
| "grad_norm": 1.1452990663610407, | |
| "learning_rate": 9.98756375606713e-06, | |
| "loss": 0.4269, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.04790419161676647, | |
| "grad_norm": 1.3019072667574294, | |
| "learning_rate": 9.985851126551428e-06, | |
| "loss": 0.4653, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.05089820359281437, | |
| "grad_norm": 0.9875023401666374, | |
| "learning_rate": 9.9840282199383e-06, | |
| "loss": 0.3829, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.05389221556886228, | |
| "grad_norm": 1.0529387527844354, | |
| "learning_rate": 9.982095076546806e-06, | |
| "loss": 0.4399, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.05688622754491018, | |
| "grad_norm": 1.179874914954893, | |
| "learning_rate": 9.980051739134235e-06, | |
| "loss": 0.4457, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.059880239520958084, | |
| "grad_norm": 1.215394016628416, | |
| "learning_rate": 9.977898252895133e-06, | |
| "loss": 0.46, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.06287425149700598, | |
| "grad_norm": 1.100471009595285, | |
| "learning_rate": 9.975634665460333e-06, | |
| "loss": 0.4327, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.0658682634730539, | |
| "grad_norm": 1.1308041938018731, | |
| "learning_rate": 9.973261026895878e-06, | |
| "loss": 0.4032, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.0688622754491018, | |
| "grad_norm": 1.0857215593281804, | |
| "learning_rate": 9.970777389701927e-06, | |
| "loss": 0.4898, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.0718562874251497, | |
| "grad_norm": 1.0677097396510304, | |
| "learning_rate": 9.968183808811586e-06, | |
| "loss": 0.3944, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.0748502994011976, | |
| "grad_norm": 1.2863204713458667, | |
| "learning_rate": 9.965480341589702e-06, | |
| "loss": 0.4591, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.07784431137724551, | |
| "grad_norm": 1.099240850527482, | |
| "learning_rate": 9.962667047831585e-06, | |
| "loss": 0.4611, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.08083832335329341, | |
| "grad_norm": 1.1178720677514475, | |
| "learning_rate": 9.95974398976169e-06, | |
| "loss": 0.4008, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.08383233532934131, | |
| "grad_norm": 1.127386283354638, | |
| "learning_rate": 9.95671123203224e-06, | |
| "loss": 0.4507, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.08682634730538923, | |
| "grad_norm": 1.2066334083956984, | |
| "learning_rate": 9.953568841721796e-06, | |
| "loss": 0.4805, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.08982035928143713, | |
| "grad_norm": 0.9775903615301047, | |
| "learning_rate": 9.950316888333775e-06, | |
| "loss": 0.3911, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.09281437125748503, | |
| "grad_norm": 1.0222727254646968, | |
| "learning_rate": 9.946955443794908e-06, | |
| "loss": 0.4688, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.09580838323353294, | |
| "grad_norm": 1.2017413132687296, | |
| "learning_rate": 9.943484582453653e-06, | |
| "loss": 0.4383, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.09880239520958084, | |
| "grad_norm": 0.9625560512846535, | |
| "learning_rate": 9.939904381078553e-06, | |
| "loss": 0.3857, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.10179640718562874, | |
| "grad_norm": 0.9265832067065958, | |
| "learning_rate": 9.93621491885653e-06, | |
| "loss": 0.3652, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.10479041916167664, | |
| "grad_norm": 1.09656335883221, | |
| "learning_rate": 9.932416277391144e-06, | |
| "loss": 0.4103, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.10778443113772455, | |
| "grad_norm": 1.091981004351075, | |
| "learning_rate": 9.928508540700775e-06, | |
| "loss": 0.4308, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.11077844311377245, | |
| "grad_norm": 1.1471983189479533, | |
| "learning_rate": 9.924491795216777e-06, | |
| "loss": 0.4722, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.11377245508982035, | |
| "grad_norm": 1.1015423992585116, | |
| "learning_rate": 9.920366129781564e-06, | |
| "loss": 0.4317, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.11676646706586827, | |
| "grad_norm": 0.9662315165782208, | |
| "learning_rate": 9.916131635646635e-06, | |
| "loss": 0.3857, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.11976047904191617, | |
| "grad_norm": 1.1042042362974234, | |
| "learning_rate": 9.91178840647057e-06, | |
| "loss": 0.4587, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.12275449101796407, | |
| "grad_norm": 1.0967203910091963, | |
| "learning_rate": 9.907336538316946e-06, | |
| "loss": 0.4116, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.12574850299401197, | |
| "grad_norm": 0.9924909654132533, | |
| "learning_rate": 9.902776129652223e-06, | |
| "loss": 0.3572, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.12874251497005987, | |
| "grad_norm": 1.0528124902395053, | |
| "learning_rate": 9.898107281343557e-06, | |
| "loss": 0.3685, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.1317365269461078, | |
| "grad_norm": 0.9511215627795053, | |
| "learning_rate": 9.893330096656576e-06, | |
| "loss": 0.3436, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.1347305389221557, | |
| "grad_norm": 1.0836924506952512, | |
| "learning_rate": 9.888444681253087e-06, | |
| "loss": 0.4279, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.1377245508982036, | |
| "grad_norm": 1.1151679775891317, | |
| "learning_rate": 9.883451143188753e-06, | |
| "loss": 0.4221, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.1407185628742515, | |
| "grad_norm": 1.064674167680838, | |
| "learning_rate": 9.878349592910694e-06, | |
| "loss": 0.3527, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.1437125748502994, | |
| "grad_norm": 1.1036459061321013, | |
| "learning_rate": 9.873140143255035e-06, | |
| "loss": 0.4467, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.1467065868263473, | |
| "grad_norm": 1.0400888612361692, | |
| "learning_rate": 9.867822909444435e-06, | |
| "loss": 0.4393, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.1497005988023952, | |
| "grad_norm": 1.0251238188251106, | |
| "learning_rate": 9.862398009085511e-06, | |
| "loss": 0.4082, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.15269461077844312, | |
| "grad_norm": 1.0567380954404586, | |
| "learning_rate": 9.856865562166256e-06, | |
| "loss": 0.3869, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.15568862275449102, | |
| "grad_norm": 1.0102720103461709, | |
| "learning_rate": 9.851225691053382e-06, | |
| "loss": 0.3592, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.15868263473053892, | |
| "grad_norm": 1.1127063801621502, | |
| "learning_rate": 9.8454785204896e-06, | |
| "loss": 0.4344, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.16167664670658682, | |
| "grad_norm": 0.9065152442964788, | |
| "learning_rate": 9.83962417759088e-06, | |
| "loss": 0.3664, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.16467065868263472, | |
| "grad_norm": 1.072288748663969, | |
| "learning_rate": 9.833662791843628e-06, | |
| "loss": 0.3651, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.16766467065868262, | |
| "grad_norm": 1.0107526179429653, | |
| "learning_rate": 9.827594495101824e-06, | |
| "loss": 0.3898, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.17065868263473055, | |
| "grad_norm": 1.0293793758687437, | |
| "learning_rate": 9.821419421584108e-06, | |
| "loss": 0.4018, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.17365269461077845, | |
| "grad_norm": 1.040641029363235, | |
| "learning_rate": 9.815137707870806e-06, | |
| "loss": 0.4427, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.17664670658682635, | |
| "grad_norm": 1.1346149341238732, | |
| "learning_rate": 9.808749492900917e-06, | |
| "loss": 0.3956, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.17964071856287425, | |
| "grad_norm": 1.002697448139183, | |
| "learning_rate": 9.802254917969033e-06, | |
| "loss": 0.4166, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.18263473053892215, | |
| "grad_norm": 1.166908537675008, | |
| "learning_rate": 9.795654126722218e-06, | |
| "loss": 0.3991, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.18562874251497005, | |
| "grad_norm": 0.9918959030412066, | |
| "learning_rate": 9.788947265156828e-06, | |
| "loss": 0.3852, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.18862275449101795, | |
| "grad_norm": 1.0324508520275155, | |
| "learning_rate": 9.782134481615282e-06, | |
| "loss": 0.3498, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.19161676646706588, | |
| "grad_norm": 1.0257932465807185, | |
| "learning_rate": 9.775215926782788e-06, | |
| "loss": 0.4258, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.19461077844311378, | |
| "grad_norm": 1.0782444651324654, | |
| "learning_rate": 9.768191753683997e-06, | |
| "loss": 0.3829, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.19760479041916168, | |
| "grad_norm": 1.0912461346785043, | |
| "learning_rate": 9.761062117679632e-06, | |
| "loss": 0.3816, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.20059880239520958, | |
| "grad_norm": 1.0614318310008004, | |
| "learning_rate": 9.75382717646304e-06, | |
| "loss": 0.3616, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.20359281437125748, | |
| "grad_norm": 1.1501837516722428, | |
| "learning_rate": 9.746487090056712e-06, | |
| "loss": 0.4522, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.20658682634730538, | |
| "grad_norm": 0.9006942564473596, | |
| "learning_rate": 9.739042020808746e-06, | |
| "loss": 0.3474, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.20958083832335328, | |
| "grad_norm": 1.019042335184148, | |
| "learning_rate": 9.73149213338924e-06, | |
| "loss": 0.4297, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.2125748502994012, | |
| "grad_norm": 1.0261132022875048, | |
| "learning_rate": 9.72383759478667e-06, | |
| "loss": 0.433, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.2155688622754491, | |
| "grad_norm": 1.0279884605503198, | |
| "learning_rate": 9.71607857430419e-06, | |
| "loss": 0.3331, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.218562874251497, | |
| "grad_norm": 1.10261368241984, | |
| "learning_rate": 9.708215243555875e-06, | |
| "loss": 0.4255, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.2215568862275449, | |
| "grad_norm": 1.1938856577221162, | |
| "learning_rate": 9.700247776462944e-06, | |
| "loss": 0.3986, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.2245508982035928, | |
| "grad_norm": 1.1282208342635738, | |
| "learning_rate": 9.6921763492499e-06, | |
| "loss": 0.391, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.2275449101796407, | |
| "grad_norm": 0.9900141058207153, | |
| "learning_rate": 9.68400114044064e-06, | |
| "loss": 0.3177, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.23053892215568864, | |
| "grad_norm": 1.0789984345178527, | |
| "learning_rate": 9.6757223308545e-06, | |
| "loss": 0.4004, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.23353293413173654, | |
| "grad_norm": 0.9894907044520515, | |
| "learning_rate": 9.667340103602263e-06, | |
| "loss": 0.4484, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.23652694610778444, | |
| "grad_norm": 0.9965832888156311, | |
| "learning_rate": 9.658854644082099e-06, | |
| "loss": 0.3594, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.23952095808383234, | |
| "grad_norm": 1.2512456460659447, | |
| "learning_rate": 9.650266139975474e-06, | |
| "loss": 0.4393, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.24251497005988024, | |
| "grad_norm": 1.1185407406813124, | |
| "learning_rate": 9.641574781242999e-06, | |
| "loss": 0.384, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.24550898203592814, | |
| "grad_norm": 1.079792166966554, | |
| "learning_rate": 9.632780760120217e-06, | |
| "loss": 0.3987, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.24850299401197604, | |
| "grad_norm": 1.0335442030139939, | |
| "learning_rate": 9.62388427111336e-06, | |
| "loss": 0.3622, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.25149700598802394, | |
| "grad_norm": 0.9758738026602853, | |
| "learning_rate": 9.614885510995047e-06, | |
| "loss": 0.3328, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.25449101796407186, | |
| "grad_norm": 1.0689976945861128, | |
| "learning_rate": 9.605784678799934e-06, | |
| "loss": 0.3807, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.25748502994011974, | |
| "grad_norm": 1.03144717267387, | |
| "learning_rate": 9.596581975820304e-06, | |
| "loss": 0.3869, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.26047904191616766, | |
| "grad_norm": 1.2177802527359998, | |
| "learning_rate": 9.587277605601617e-06, | |
| "loss": 0.4208, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.2634730538922156, | |
| "grad_norm": 1.1785454020154376, | |
| "learning_rate": 9.577871773938013e-06, | |
| "loss": 0.4236, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.26646706586826346, | |
| "grad_norm": 1.1125909821479287, | |
| "learning_rate": 9.568364688867757e-06, | |
| "loss": 0.4873, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.2694610778443114, | |
| "grad_norm": 1.041716945636208, | |
| "learning_rate": 9.558756560668637e-06, | |
| "loss": 0.3689, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.27245508982035926, | |
| "grad_norm": 1.1972249911203883, | |
| "learning_rate": 9.549047601853313e-06, | |
| "loss": 0.3904, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.2754491017964072, | |
| "grad_norm": 1.2145517606375182, | |
| "learning_rate": 9.539238027164618e-06, | |
| "loss": 0.4117, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.27844311377245506, | |
| "grad_norm": 1.0847366638843148, | |
| "learning_rate": 9.52932805357081e-06, | |
| "loss": 0.3886, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.281437125748503, | |
| "grad_norm": 1.107223706682208, | |
| "learning_rate": 9.519317900260769e-06, | |
| "loss": 0.4234, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.2844311377245509, | |
| "grad_norm": 1.0512423029389995, | |
| "learning_rate": 9.509207788639148e-06, | |
| "loss": 0.4119, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.2874251497005988, | |
| "grad_norm": 1.0125109372778616, | |
| "learning_rate": 9.498997942321484e-06, | |
| "loss": 0.4155, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.2904191616766467, | |
| "grad_norm": 0.9459988882190549, | |
| "learning_rate": 9.488688587129243e-06, | |
| "loss": 0.4266, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.2934131736526946, | |
| "grad_norm": 1.159832613070711, | |
| "learning_rate": 9.47827995108483e-06, | |
| "loss": 0.3686, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.2964071856287425, | |
| "grad_norm": 1.2126346435851747, | |
| "learning_rate": 9.467772264406545e-06, | |
| "loss": 0.3967, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.2994011976047904, | |
| "grad_norm": 0.9662788722957085, | |
| "learning_rate": 9.457165759503492e-06, | |
| "loss": 0.3529, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.3023952095808383, | |
| "grad_norm": 1.0465067846012959, | |
| "learning_rate": 9.446460670970436e-06, | |
| "loss": 0.3625, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.30538922155688625, | |
| "grad_norm": 1.0771426905673824, | |
| "learning_rate": 9.435657235582616e-06, | |
| "loss": 0.321, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.3083832335329341, | |
| "grad_norm": 1.170291037733682, | |
| "learning_rate": 9.424755692290507e-06, | |
| "loss": 0.396, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.31137724550898205, | |
| "grad_norm": 1.1608390028269628, | |
| "learning_rate": 9.413756282214538e-06, | |
| "loss": 0.4272, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.3143712574850299, | |
| "grad_norm": 0.9854661428873333, | |
| "learning_rate": 9.402659248639749e-06, | |
| "loss": 0.3792, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.31736526946107785, | |
| "grad_norm": 1.0205302189095444, | |
| "learning_rate": 9.391464837010428e-06, | |
| "loss": 0.376, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.3203592814371258, | |
| "grad_norm": 1.1245385463246433, | |
| "learning_rate": 9.380173294924661e-06, | |
| "loss": 0.4283, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.32335329341317365, | |
| "grad_norm": 1.063144047888206, | |
| "learning_rate": 9.368784872128877e-06, | |
| "loss": 0.3879, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.3263473053892216, | |
| "grad_norm": 0.9722099661439674, | |
| "learning_rate": 9.357299820512305e-06, | |
| "loss": 0.3712, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.32934131736526945, | |
| "grad_norm": 1.0475391247605021, | |
| "learning_rate": 9.345718394101412e-06, | |
| "loss": 0.3707, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.3323353293413174, | |
| "grad_norm": 1.058149177559626, | |
| "learning_rate": 9.334040849054288e-06, | |
| "loss": 0.4115, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.33532934131736525, | |
| "grad_norm": 1.088390795361788, | |
| "learning_rate": 9.322267443654974e-06, | |
| "loss": 0.3874, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.3383233532934132, | |
| "grad_norm": 1.1013443338361675, | |
| "learning_rate": 9.310398438307747e-06, | |
| "loss": 0.4323, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.3413173652694611, | |
| "grad_norm": 0.9944967696397207, | |
| "learning_rate": 9.29843409553137e-06, | |
| "loss": 0.3571, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.344311377245509, | |
| "grad_norm": 0.9723803949692649, | |
| "learning_rate": 9.286374679953278e-06, | |
| "loss": 0.3619, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.3473053892215569, | |
| "grad_norm": 1.0263431397562555, | |
| "learning_rate": 9.274220458303727e-06, | |
| "loss": 0.3407, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.3502994011976048, | |
| "grad_norm": 1.0463627357456768, | |
| "learning_rate": 9.261971699409893e-06, | |
| "loss": 0.4016, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.3532934131736527, | |
| "grad_norm": 1.07587077688648, | |
| "learning_rate": 9.249628674189928e-06, | |
| "loss": 0.3761, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.3562874251497006, | |
| "grad_norm": 1.0847776478670657, | |
| "learning_rate": 9.237191655646972e-06, | |
| "loss": 0.4102, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.3592814371257485, | |
| "grad_norm": 1.11812369249786, | |
| "learning_rate": 9.224660918863104e-06, | |
| "loss": 0.4422, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.36227544910179643, | |
| "grad_norm": 1.135985769802511, | |
| "learning_rate": 9.212036740993265e-06, | |
| "loss": 0.391, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.3652694610778443, | |
| "grad_norm": 1.080200371957256, | |
| "learning_rate": 9.199319401259132e-06, | |
| "loss": 0.3878, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.36826347305389223, | |
| "grad_norm": 1.099637838806261, | |
| "learning_rate": 9.186509180942928e-06, | |
| "loss": 0.3652, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.3712574850299401, | |
| "grad_norm": 1.0554720701766402, | |
| "learning_rate": 9.173606363381218e-06, | |
| "loss": 0.4284, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.37425149700598803, | |
| "grad_norm": 1.0455932007891395, | |
| "learning_rate": 9.16061123395863e-06, | |
| "loss": 0.3833, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.3772455089820359, | |
| "grad_norm": 1.1376490909593353, | |
| "learning_rate": 9.147524080101543e-06, | |
| "loss": 0.3876, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.38023952095808383, | |
| "grad_norm": 1.188836716557053, | |
| "learning_rate": 9.134345191271742e-06, | |
| "loss": 0.4461, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.38323353293413176, | |
| "grad_norm": 1.1491556565938952, | |
| "learning_rate": 9.121074858959997e-06, | |
| "loss": 0.4208, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.38622754491017963, | |
| "grad_norm": 0.9829441169608918, | |
| "learning_rate": 9.107713376679634e-06, | |
| "loss": 0.3933, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.38922155688622756, | |
| "grad_norm": 1.1745728924206915, | |
| "learning_rate": 9.094261039960028e-06, | |
| "loss": 0.4012, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.39221556886227543, | |
| "grad_norm": 1.18181153163394, | |
| "learning_rate": 9.08071814634008e-06, | |
| "loss": 0.4552, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.39520958083832336, | |
| "grad_norm": 1.1433168380702565, | |
| "learning_rate": 9.067084995361623e-06, | |
| "loss": 0.395, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.39820359281437123, | |
| "grad_norm": 1.166913861368381, | |
| "learning_rate": 9.053361888562807e-06, | |
| "loss": 0.4411, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.40119760479041916, | |
| "grad_norm": 1.1022003304573305, | |
| "learning_rate": 9.039549129471423e-06, | |
| "loss": 0.3753, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.4041916167664671, | |
| "grad_norm": 0.9195981793191216, | |
| "learning_rate": 9.025647023598196e-06, | |
| "loss": 0.353, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.40718562874251496, | |
| "grad_norm": 1.039297953987639, | |
| "learning_rate": 9.011655878430018e-06, | |
| "loss": 0.3611, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.4101796407185629, | |
| "grad_norm": 1.161126504900604, | |
| "learning_rate": 8.99757600342316e-06, | |
| "loss": 0.423, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.41317365269461076, | |
| "grad_norm": 1.1493232149734816, | |
| "learning_rate": 8.983407709996415e-06, | |
| "loss": 0.4031, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.4161676646706587, | |
| "grad_norm": 1.1232381761248593, | |
| "learning_rate": 8.969151311524215e-06, | |
| "loss": 0.3778, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.41916167664670656, | |
| "grad_norm": 1.1797682325377319, | |
| "learning_rate": 8.954807123329703e-06, | |
| "loss": 0.4066, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.4221556886227545, | |
| "grad_norm": 0.9898700775146696, | |
| "learning_rate": 8.940375462677758e-06, | |
| "loss": 0.3396, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.4251497005988024, | |
| "grad_norm": 1.0989992916462306, | |
| "learning_rate": 8.92585664876797e-06, | |
| "loss": 0.3787, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.4281437125748503, | |
| "grad_norm": 0.9751743477236701, | |
| "learning_rate": 8.911251002727588e-06, | |
| "loss": 0.3697, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.4311377245508982, | |
| "grad_norm": 1.0020331833699752, | |
| "learning_rate": 8.896558847604414e-06, | |
| "loss": 0.3543, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.4341317365269461, | |
| "grad_norm": 1.0479535082629463, | |
| "learning_rate": 8.881780508359661e-06, | |
| "loss": 0.4605, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.437125748502994, | |
| "grad_norm": 1.321950771951831, | |
| "learning_rate": 8.86691631186076e-06, | |
| "loss": 0.392, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.44011976047904194, | |
| "grad_norm": 0.8988900523275937, | |
| "learning_rate": 8.851966586874138e-06, | |
| "loss": 0.3388, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.4431137724550898, | |
| "grad_norm": 1.0463909572882797, | |
| "learning_rate": 8.836931664057935e-06, | |
| "loss": 0.3617, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.44610778443113774, | |
| "grad_norm": 1.1289518767389093, | |
| "learning_rate": 8.821811875954705e-06, | |
| "loss": 0.388, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.4491017964071856, | |
| "grad_norm": 1.0553303120139725, | |
| "learning_rate": 8.806607556984045e-06, | |
| "loss": 0.3405, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.45209580838323354, | |
| "grad_norm": 1.0337259853792555, | |
| "learning_rate": 8.791319043435213e-06, | |
| "loss": 0.3749, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.4550898203592814, | |
| "grad_norm": 0.9697649279113124, | |
| "learning_rate": 8.775946673459682e-06, | |
| "loss": 0.3803, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.45808383233532934, | |
| "grad_norm": 1.0554720933913004, | |
| "learning_rate": 8.76049078706366e-06, | |
| "loss": 0.3547, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.46107784431137727, | |
| "grad_norm": 1.1105934710364926, | |
| "learning_rate": 8.744951726100572e-06, | |
| "loss": 0.3338, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.46407185628742514, | |
| "grad_norm": 1.2520450241935366, | |
| "learning_rate": 8.729329834263503e-06, | |
| "loss": 0.3677, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.46706586826347307, | |
| "grad_norm": 1.1146784316043523, | |
| "learning_rate": 8.713625457077585e-06, | |
| "loss": 0.3994, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.47005988023952094, | |
| "grad_norm": 1.0874986898542098, | |
| "learning_rate": 8.697838941892371e-06, | |
| "loss": 0.354, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.47305389221556887, | |
| "grad_norm": 1.0973091543852385, | |
| "learning_rate": 8.681970637874131e-06, | |
| "loss": 0.4154, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.47604790419161674, | |
| "grad_norm": 1.1625464475176286, | |
| "learning_rate": 8.666020895998154e-06, | |
| "loss": 0.3891, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.47904191616766467, | |
| "grad_norm": 1.161010302826186, | |
| "learning_rate": 8.64999006904096e-06, | |
| "loss": 0.3961, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.4820359281437126, | |
| "grad_norm": 1.1337346146886311, | |
| "learning_rate": 8.63387851157252e-06, | |
| "loss": 0.3942, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.48502994011976047, | |
| "grad_norm": 1.0391529706550826, | |
| "learning_rate": 8.617686579948396e-06, | |
| "loss": 0.3394, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.4880239520958084, | |
| "grad_norm": 1.1236983918674, | |
| "learning_rate": 8.60141463230187e-06, | |
| "loss": 0.3576, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.49101796407185627, | |
| "grad_norm": 1.1148066475922545, | |
| "learning_rate": 8.585063028536015e-06, | |
| "loss": 0.4442, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.4940119760479042, | |
| "grad_norm": 1.0243204434346274, | |
| "learning_rate": 8.568632130315747e-06, | |
| "loss": 0.3854, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.49700598802395207, | |
| "grad_norm": 1.059220388171805, | |
| "learning_rate": 8.552122301059807e-06, | |
| "loss": 0.3738, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.0387730035667786, | |
| "learning_rate": 8.535533905932739e-06, | |
| "loss": 0.36, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.5029940119760479, | |
| "grad_norm": 1.0439677605881634, | |
| "learning_rate": 8.518867311836808e-06, | |
| "loss": 0.4093, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.5059880239520959, | |
| "grad_norm": 1.1135308466754046, | |
| "learning_rate": 8.502122887403882e-06, | |
| "loss": 0.4136, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.5089820359281437, | |
| "grad_norm": 1.2076685795281583, | |
| "learning_rate": 8.485301002987285e-06, | |
| "loss": 0.3306, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.5119760479041916, | |
| "grad_norm": 1.0431456753366977, | |
| "learning_rate": 8.468402030653598e-06, | |
| "loss": 0.4271, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.5149700598802395, | |
| "grad_norm": 1.004312710195015, | |
| "learning_rate": 8.451426344174433e-06, | |
| "loss": 0.3612, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.5179640718562875, | |
| "grad_norm": 1.1763053752804515, | |
| "learning_rate": 8.434374319018165e-06, | |
| "loss": 0.4386, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.5209580838323353, | |
| "grad_norm": 1.002861520774924, | |
| "learning_rate": 8.417246332341638e-06, | |
| "loss": 0.3607, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.5239520958083832, | |
| "grad_norm": 1.0163281602743386, | |
| "learning_rate": 8.4000427629818e-06, | |
| "loss": 0.3742, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.5269461077844312, | |
| "grad_norm": 1.1136294757855683, | |
| "learning_rate": 8.382763991447344e-06, | |
| "loss": 0.41, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.5299401197604791, | |
| "grad_norm": 1.1390366644182248, | |
| "learning_rate": 8.365410399910287e-06, | |
| "loss": 0.3872, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.5329341317365269, | |
| "grad_norm": 1.2909433668063384, | |
| "learning_rate": 8.347982372197515e-06, | |
| "loss": 0.3782, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.5359281437125748, | |
| "grad_norm": 1.0094599265789481, | |
| "learning_rate": 8.33048029378229e-06, | |
| "loss": 0.3918, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.5389221556886228, | |
| "grad_norm": 0.9603157918644131, | |
| "learning_rate": 8.312904551775731e-06, | |
| "loss": 0.3454, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.5419161676646707, | |
| "grad_norm": 0.977672929873693, | |
| "learning_rate": 8.295255534918249e-06, | |
| "loss": 0.3825, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.5449101796407185, | |
| "grad_norm": 1.213979340299757, | |
| "learning_rate": 8.277533633570948e-06, | |
| "loss": 0.4422, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.5479041916167665, | |
| "grad_norm": 1.0327111193464191, | |
| "learning_rate": 8.25973923970699e-06, | |
| "loss": 0.4067, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.5508982035928144, | |
| "grad_norm": 0.9574992889413487, | |
| "learning_rate": 8.241872746902934e-06, | |
| "loss": 0.4474, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.5538922155688623, | |
| "grad_norm": 1.0271449481355721, | |
| "learning_rate": 8.223934550330015e-06, | |
| "loss": 0.3573, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.5568862275449101, | |
| "grad_norm": 1.085871604199334, | |
| "learning_rate": 8.20592504674542e-06, | |
| "loss": 0.4165, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.5598802395209581, | |
| "grad_norm": 0.9177887450776159, | |
| "learning_rate": 8.187844634483495e-06, | |
| "loss": 0.4189, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.562874251497006, | |
| "grad_norm": 1.145875691459872, | |
| "learning_rate": 8.16969371344696e-06, | |
| "loss": 0.4779, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.5658682634730539, | |
| "grad_norm": 0.9058380786968147, | |
| "learning_rate": 8.151472685098037e-06, | |
| "loss": 0.387, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.5688622754491018, | |
| "grad_norm": 0.9918785343604253, | |
| "learning_rate": 8.13318195244958e-06, | |
| "loss": 0.3642, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.5718562874251497, | |
| "grad_norm": 1.0094499454429962, | |
| "learning_rate": 8.114821920056177e-06, | |
| "loss": 0.387, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.5748502994011976, | |
| "grad_norm": 1.0117656185636714, | |
| "learning_rate": 8.096392994005177e-06, | |
| "loss": 0.4095, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.5778443113772455, | |
| "grad_norm": 0.8813749104246654, | |
| "learning_rate": 8.077895581907719e-06, | |
| "loss": 0.3745, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.5808383233532934, | |
| "grad_norm": 0.8787597823406559, | |
| "learning_rate": 8.059330092889724e-06, | |
| "loss": 0.3363, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.5838323353293413, | |
| "grad_norm": 0.9920644099714983, | |
| "learning_rate": 8.040696937582833e-06, | |
| "loss": 0.3818, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.5868263473053892, | |
| "grad_norm": 1.0660433857696556, | |
| "learning_rate": 8.021996528115335e-06, | |
| "loss": 0.3495, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.5898203592814372, | |
| "grad_norm": 1.0694059285104096, | |
| "learning_rate": 8.003229278103044e-06, | |
| "loss": 0.3617, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.592814371257485, | |
| "grad_norm": 1.06330338919478, | |
| "learning_rate": 7.984395602640153e-06, | |
| "loss": 0.3655, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.5958083832335329, | |
| "grad_norm": 1.1525243383043442, | |
| "learning_rate": 7.96549591829006e-06, | |
| "loss": 0.379, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.5988023952095808, | |
| "grad_norm": 0.9230482284229541, | |
| "learning_rate": 7.946530643076138e-06, | |
| "loss": 0.3274, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.5988023952095808, | |
| "eval_loss": 0.37389740347862244, | |
| "eval_runtime": 2.0945, | |
| "eval_samples_per_second": 25.782, | |
| "eval_steps_per_second": 6.684, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.6017964071856288, | |
| "grad_norm": 1.0019766539666788, | |
| "learning_rate": 7.927500196472506e-06, | |
| "loss": 0.3823, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.6047904191616766, | |
| "grad_norm": 1.1123295404323308, | |
| "learning_rate": 7.908404999394747e-06, | |
| "loss": 0.3286, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.6077844311377245, | |
| "grad_norm": 1.0548653358670876, | |
| "learning_rate": 7.889245474190588e-06, | |
| "loss": 0.3759, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.6107784431137725, | |
| "grad_norm": 1.2433092859069619, | |
| "learning_rate": 7.870022044630569e-06, | |
| "loss": 0.4113, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.6137724550898204, | |
| "grad_norm": 0.990791990832856, | |
| "learning_rate": 7.85073513589867e-06, | |
| "loss": 0.3817, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.6167664670658682, | |
| "grad_norm": 1.2594009600174254, | |
| "learning_rate": 7.831385174582901e-06, | |
| "loss": 0.3861, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.6197604790419161, | |
| "grad_norm": 1.0197727813815136, | |
| "learning_rate": 7.81197258866587e-06, | |
| "loss": 0.3858, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.6227544910179641, | |
| "grad_norm": 1.029024178798685, | |
| "learning_rate": 7.792497807515317e-06, | |
| "loss": 0.3405, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.625748502994012, | |
| "grad_norm": 1.0843471502861493, | |
| "learning_rate": 7.772961261874615e-06, | |
| "loss": 0.3311, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.6287425149700598, | |
| "grad_norm": 1.0932823908799638, | |
| "learning_rate": 7.75336338385325e-06, | |
| "loss": 0.3674, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.6317365269461078, | |
| "grad_norm": 1.0982688835218217, | |
| "learning_rate": 7.733704606917248e-06, | |
| "loss": 0.399, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.6347305389221557, | |
| "grad_norm": 1.0360696347675225, | |
| "learning_rate": 7.713985365879607e-06, | |
| "loss": 0.343, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.6377245508982036, | |
| "grad_norm": 1.0069925186594795, | |
| "learning_rate": 7.694206096890667e-06, | |
| "loss": 0.3541, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.6407185628742516, | |
| "grad_norm": 0.9610077306255195, | |
| "learning_rate": 7.674367237428467e-06, | |
| "loss": 0.3386, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.6437125748502994, | |
| "grad_norm": 0.9560418587007402, | |
| "learning_rate": 7.654469226289068e-06, | |
| "loss": 0.3263, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.6467065868263473, | |
| "grad_norm": 1.0272151882071272, | |
| "learning_rate": 7.63451250357685e-06, | |
| "loss": 0.3906, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.6497005988023952, | |
| "grad_norm": 1.0602665340805821, | |
| "learning_rate": 7.614497510694774e-06, | |
| "loss": 0.3981, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.6526946107784432, | |
| "grad_norm": 1.091109550941202, | |
| "learning_rate": 7.5944246903346204e-06, | |
| "loss": 0.3585, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.655688622754491, | |
| "grad_norm": 0.973104649648916, | |
| "learning_rate": 7.574294486467204e-06, | |
| "loss": 0.3523, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.6586826347305389, | |
| "grad_norm": 1.0388023904136494, | |
| "learning_rate": 7.55410734433254e-06, | |
| "loss": 0.3984, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.6616766467065869, | |
| "grad_norm": 1.1132398868564881, | |
| "learning_rate": 7.533863710430011e-06, | |
| "loss": 0.4175, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.6646706586826348, | |
| "grad_norm": 1.0247318498739135, | |
| "learning_rate": 7.513564032508484e-06, | |
| "loss": 0.349, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.6676646706586826, | |
| "grad_norm": 1.162703731242435, | |
| "learning_rate": 7.493208759556406e-06, | |
| "loss": 0.4039, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.6706586826347305, | |
| "grad_norm": 1.151319734294355, | |
| "learning_rate": 7.472798341791877e-06, | |
| "loss": 0.4364, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.6736526946107785, | |
| "grad_norm": 1.018414882097199, | |
| "learning_rate": 7.452333230652688e-06, | |
| "loss": 0.3628, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.6766467065868264, | |
| "grad_norm": 0.9734290818533377, | |
| "learning_rate": 7.431813878786343e-06, | |
| "loss": 0.3525, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.6796407185628742, | |
| "grad_norm": 1.0981823419124637, | |
| "learning_rate": 7.4112407400400395e-06, | |
| "loss": 0.3984, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.6826347305389222, | |
| "grad_norm": 1.0082580347081591, | |
| "learning_rate": 7.390614269450633e-06, | |
| "loss": 0.3661, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.6856287425149701, | |
| "grad_norm": 0.9094922182052086, | |
| "learning_rate": 7.369934923234577e-06, | |
| "loss": 0.3461, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.688622754491018, | |
| "grad_norm": 1.1091280652518132, | |
| "learning_rate": 7.349203158777826e-06, | |
| "loss": 0.4246, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.6916167664670658, | |
| "grad_norm": 1.1048056691020671, | |
| "learning_rate": 7.32841943462572e-06, | |
| "loss": 0.3855, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.6946107784431138, | |
| "grad_norm": 1.0980287910054944, | |
| "learning_rate": 7.3075842104728445e-06, | |
| "loss": 0.3826, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.6976047904191617, | |
| "grad_norm": 1.0677026866192707, | |
| "learning_rate": 7.286697947152868e-06, | |
| "loss": 0.3972, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.7005988023952096, | |
| "grad_norm": 1.0422936388009736, | |
| "learning_rate": 7.265761106628338e-06, | |
| "loss": 0.3628, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.7035928143712575, | |
| "grad_norm": 0.9387475861215742, | |
| "learning_rate": 7.244774151980466e-06, | |
| "loss": 0.3556, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.7065868263473054, | |
| "grad_norm": 1.2605182184599664, | |
| "learning_rate": 7.223737547398898e-06, | |
| "loss": 0.3689, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.7095808383233533, | |
| "grad_norm": 1.06340308742501, | |
| "learning_rate": 7.20265175817143e-06, | |
| "loss": 0.3811, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.7125748502994012, | |
| "grad_norm": 1.1884369289045194, | |
| "learning_rate": 7.181517250673729e-06, | |
| "loss": 0.4115, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.7155688622754491, | |
| "grad_norm": 1.0604750281111033, | |
| "learning_rate": 7.1603344923590065e-06, | |
| "loss": 0.368, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.718562874251497, | |
| "grad_norm": 0.9884120920758936, | |
| "learning_rate": 7.139103951747694e-06, | |
| "loss": 0.3964, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.7215568862275449, | |
| "grad_norm": 0.9882901892475843, | |
| "learning_rate": 7.1178260984170675e-06, | |
| "loss": 0.3402, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.7245508982035929, | |
| "grad_norm": 1.0159073940426744, | |
| "learning_rate": 7.0965014029908654e-06, | |
| "loss": 0.3387, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.7275449101796407, | |
| "grad_norm": 0.9732761192755145, | |
| "learning_rate": 7.075130337128883e-06, | |
| "loss": 0.3756, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.7305389221556886, | |
| "grad_norm": 1.1181467661716453, | |
| "learning_rate": 7.053713373516538e-06, | |
| "loss": 0.3976, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.7335329341317365, | |
| "grad_norm": 0.9914415495598553, | |
| "learning_rate": 7.03225098585441e-06, | |
| "loss": 0.406, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.7365269461077845, | |
| "grad_norm": 0.9349380637899489, | |
| "learning_rate": 7.0107436488477694e-06, | |
| "loss": 0.3858, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.7395209580838323, | |
| "grad_norm": 1.0671641205988223, | |
| "learning_rate": 6.989191838196083e-06, | |
| "loss": 0.4026, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.7425149700598802, | |
| "grad_norm": 1.0353222194746492, | |
| "learning_rate": 6.9675960305824785e-06, | |
| "loss": 0.3536, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.7455089820359282, | |
| "grad_norm": 1.1807206074512484, | |
| "learning_rate": 6.945956703663212e-06, | |
| "loss": 0.3996, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.7485029940119761, | |
| "grad_norm": 0.9688487164109857, | |
| "learning_rate": 6.9242743360570985e-06, | |
| "loss": 0.3628, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.7514970059880239, | |
| "grad_norm": 1.198625172577203, | |
| "learning_rate": 6.9025494073349284e-06, | |
| "loss": 0.4126, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.7544910179640718, | |
| "grad_norm": 0.9178690511255925, | |
| "learning_rate": 6.880782398008862e-06, | |
| "loss": 0.3751, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.7574850299401198, | |
| "grad_norm": 1.1026984639475659, | |
| "learning_rate": 6.858973789521792e-06, | |
| "loss": 0.3417, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.7604790419161677, | |
| "grad_norm": 1.0833627322649082, | |
| "learning_rate": 6.837124064236709e-06, | |
| "loss": 0.4053, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.7634730538922155, | |
| "grad_norm": 0.9820957906659576, | |
| "learning_rate": 6.815233705426019e-06, | |
| "loss": 0.3697, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.7664670658682635, | |
| "grad_norm": 1.0455648368913306, | |
| "learning_rate": 6.7933031972608644e-06, | |
| "loss": 0.401, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.7694610778443114, | |
| "grad_norm": 0.9814496368114252, | |
| "learning_rate": 6.771333024800411e-06, | |
| "loss": 0.3802, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.7724550898203593, | |
| "grad_norm": 1.1922223059369825, | |
| "learning_rate": 6.74932367398112e-06, | |
| "loss": 0.3754, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.7754491017964071, | |
| "grad_norm": 1.011433354510362, | |
| "learning_rate": 6.727275631605996e-06, | |
| "loss": 0.3292, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.7784431137724551, | |
| "grad_norm": 1.0736498595732458, | |
| "learning_rate": 6.70518938533383e-06, | |
| "loss": 0.3712, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.781437125748503, | |
| "grad_norm": 1.1162157690090078, | |
| "learning_rate": 6.683065423668403e-06, | |
| "loss": 0.3894, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.7844311377245509, | |
| "grad_norm": 1.134092488993485, | |
| "learning_rate": 6.660904235947687e-06, | |
| "loss": 0.3691, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.7874251497005988, | |
| "grad_norm": 1.012100290801701, | |
| "learning_rate": 6.638706312333018e-06, | |
| "loss": 0.3894, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.7904191616766467, | |
| "grad_norm": 1.0528739506756517, | |
| "learning_rate": 6.61647214379826e-06, | |
| "loss": 0.4274, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.7934131736526946, | |
| "grad_norm": 1.1867599275325125, | |
| "learning_rate": 6.594202222118941e-06, | |
| "loss": 0.3937, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.7964071856287425, | |
| "grad_norm": 1.0932639266145312, | |
| "learning_rate": 6.571897039861377e-06, | |
| "loss": 0.375, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.7994011976047904, | |
| "grad_norm": 1.02130071424371, | |
| "learning_rate": 6.549557090371775e-06, | |
| "loss": 0.3882, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.8023952095808383, | |
| "grad_norm": 0.9547529629061537, | |
| "learning_rate": 6.527182867765333e-06, | |
| "loss": 0.2852, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.8053892215568862, | |
| "grad_norm": 1.0800225605936187, | |
| "learning_rate": 6.504774866915291e-06, | |
| "loss": 0.367, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.8083832335329342, | |
| "grad_norm": 1.1361032394179078, | |
| "learning_rate": 6.482333583442002e-06, | |
| "loss": 0.3461, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.811377245508982, | |
| "grad_norm": 0.9405938729804465, | |
| "learning_rate": 6.459859513701967e-06, | |
| "loss": 0.3741, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.8143712574850299, | |
| "grad_norm": 1.2342436132113987, | |
| "learning_rate": 6.437353154776848e-06, | |
| "loss": 0.3792, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.8173652694610778, | |
| "grad_norm": 1.0822093992034192, | |
| "learning_rate": 6.414815004462483e-06, | |
| "loss": 0.412, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.8203592814371258, | |
| "grad_norm": 1.0665482591873432, | |
| "learning_rate": 6.3922455612578715e-06, | |
| "loss": 0.4108, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.8233532934131736, | |
| "grad_norm": 1.0201014669636055, | |
| "learning_rate": 6.369645324354149e-06, | |
| "loss": 0.3203, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.8263473053892215, | |
| "grad_norm": 0.9313182024828596, | |
| "learning_rate": 6.3470147936235485e-06, | |
| "loss": 0.3795, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.8293413173652695, | |
| "grad_norm": 0.8738812181770729, | |
| "learning_rate": 6.3243544696083355e-06, | |
| "loss": 0.341, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.8323353293413174, | |
| "grad_norm": 0.9646214219623503, | |
| "learning_rate": 6.301664853509755e-06, | |
| "loss": 0.3233, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.8353293413173652, | |
| "grad_norm": 0.9453214555432041, | |
| "learning_rate": 6.278946447176924e-06, | |
| "loss": 0.3685, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.8383233532934131, | |
| "grad_norm": 0.9155013605437612, | |
| "learning_rate": 6.256199753095745e-06, | |
| "loss": 0.339, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.8413173652694611, | |
| "grad_norm": 0.9723301479000019, | |
| "learning_rate": 6.233425274377793e-06, | |
| "loss": 0.3649, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.844311377245509, | |
| "grad_norm": 1.3034219661741946, | |
| "learning_rate": 6.21062351474918e-06, | |
| "loss": 0.414, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.8473053892215568, | |
| "grad_norm": 1.0763031079194918, | |
| "learning_rate": 6.18779497853942e-06, | |
| "loss": 0.3913, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.8502994011976048, | |
| "grad_norm": 1.060826963618418, | |
| "learning_rate": 6.164940170670266e-06, | |
| "loss": 0.402, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.8532934131736527, | |
| "grad_norm": 0.8904854078118, | |
| "learning_rate": 6.142059596644557e-06, | |
| "loss": 0.3196, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.8562874251497006, | |
| "grad_norm": 1.0443174454117718, | |
| "learning_rate": 6.11915376253502e-06, | |
| "loss": 0.349, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.8592814371257484, | |
| "grad_norm": 1.0898306656222858, | |
| "learning_rate": 6.096223174973091e-06, | |
| "loss": 0.3824, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.8622754491017964, | |
| "grad_norm": 0.9763584233527128, | |
| "learning_rate": 6.073268341137694e-06, | |
| "loss": 0.3187, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.8652694610778443, | |
| "grad_norm": 1.1037005155142565, | |
| "learning_rate": 6.050289768744042e-06, | |
| "loss": 0.3035, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.8682634730538922, | |
| "grad_norm": 1.1552949893574622, | |
| "learning_rate": 6.0272879660323936e-06, | |
| "loss": 0.4089, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.8712574850299402, | |
| "grad_norm": 1.029617548036642, | |
| "learning_rate": 6.004263441756815e-06, | |
| "loss": 0.3732, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.874251497005988, | |
| "grad_norm": 1.0623508649511983, | |
| "learning_rate": 5.98121670517393e-06, | |
| "loss": 0.3514, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.8772455089820359, | |
| "grad_norm": 0.8949625466854254, | |
| "learning_rate": 5.958148266031654e-06, | |
| "loss": 0.3273, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.8802395209580839, | |
| "grad_norm": 0.9891586201706486, | |
| "learning_rate": 5.935058634557917e-06, | |
| "loss": 0.3381, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.8832335329341318, | |
| "grad_norm": 0.8993280891841109, | |
| "learning_rate": 5.911948321449384e-06, | |
| "loss": 0.3698, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.8862275449101796, | |
| "grad_norm": 1.1092255445223, | |
| "learning_rate": 5.8888178378601565e-06, | |
| "loss": 0.3865, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.8892215568862275, | |
| "grad_norm": 1.1517746657898535, | |
| "learning_rate": 5.865667695390468e-06, | |
| "loss": 0.3717, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.8922155688622755, | |
| "grad_norm": 0.9819545381570025, | |
| "learning_rate": 5.842498406075363e-06, | |
| "loss": 0.3514, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.8952095808383234, | |
| "grad_norm": 1.0437201087745172, | |
| "learning_rate": 5.819310482373381e-06, | |
| "loss": 0.3679, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.8982035928143712, | |
| "grad_norm": 1.017297447258417, | |
| "learning_rate": 5.796104437155213e-06, | |
| "loss": 0.4133, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.9011976047904192, | |
| "grad_norm": 1.0426986159591238, | |
| "learning_rate": 5.772880783692363e-06, | |
| "loss": 0.3442, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.9041916167664671, | |
| "grad_norm": 1.133705616825967, | |
| "learning_rate": 5.749640035645798e-06, | |
| "loss": 0.3362, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.907185628742515, | |
| "grad_norm": 1.0806943931447806, | |
| "learning_rate": 5.726382707054578e-06, | |
| "loss": 0.3667, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.9101796407185628, | |
| "grad_norm": 1.0320318529985406, | |
| "learning_rate": 5.703109312324493e-06, | |
| "loss": 0.3614, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.9131736526946108, | |
| "grad_norm": 1.1120370669266448, | |
| "learning_rate": 5.679820366216684e-06, | |
| "loss": 0.4372, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.9161676646706587, | |
| "grad_norm": 0.960621733278799, | |
| "learning_rate": 5.656516383836263e-06, | |
| "loss": 0.3198, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.9191616766467066, | |
| "grad_norm": 1.0171414143063844, | |
| "learning_rate": 5.6331978806209044e-06, | |
| "loss": 0.3841, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.9221556886227545, | |
| "grad_norm": 1.0475670466548357, | |
| "learning_rate": 5.609865372329461e-06, | |
| "loss": 0.3479, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.9251497005988024, | |
| "grad_norm": 0.9816134421329039, | |
| "learning_rate": 5.586519375030549e-06, | |
| "loss": 0.313, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.9281437125748503, | |
| "grad_norm": 0.9226555440216212, | |
| "learning_rate": 5.5631604050911354e-06, | |
| "loss": 0.3405, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.9311377245508982, | |
| "grad_norm": 1.1390317632022344, | |
| "learning_rate": 5.539788979165115e-06, | |
| "loss": 0.3195, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.9341317365269461, | |
| "grad_norm": 0.9262738399132571, | |
| "learning_rate": 5.516405614181883e-06, | |
| "loss": 0.3085, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.937125748502994, | |
| "grad_norm": 0.9017024658346414, | |
| "learning_rate": 5.4930108273349034e-06, | |
| "loss": 0.3478, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.9401197604790419, | |
| "grad_norm": 1.009518131571134, | |
| "learning_rate": 5.4696051360702725e-06, | |
| "loss": 0.3277, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.9431137724550899, | |
| "grad_norm": 1.1427767571052792, | |
| "learning_rate": 5.446189058075265e-06, | |
| "loss": 0.3867, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.9461077844311377, | |
| "grad_norm": 1.036250685743893, | |
| "learning_rate": 5.4227631112668955e-06, | |
| "loss": 0.3945, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.9491017964071856, | |
| "grad_norm": 1.0948913765154202, | |
| "learning_rate": 5.39932781378045e-06, | |
| "loss": 0.3893, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.9520958083832335, | |
| "grad_norm": 0.8970637950602717, | |
| "learning_rate": 5.375883683958041e-06, | |
| "loss": 0.3497, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.9550898203592815, | |
| "grad_norm": 1.0225586335941694, | |
| "learning_rate": 5.3524312403371255e-06, | |
| "loss": 0.3653, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.9580838323353293, | |
| "grad_norm": 1.1349947187399525, | |
| "learning_rate": 5.328971001639054e-06, | |
| "loss": 0.3296, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.9610778443113772, | |
| "grad_norm": 0.9630464347251071, | |
| "learning_rate": 5.3055034867575825e-06, | |
| "loss": 0.3334, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.9640718562874252, | |
| "grad_norm": 1.1766917719584578, | |
| "learning_rate": 5.282029214747404e-06, | |
| "loss": 0.3914, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.9670658682634731, | |
| "grad_norm": 1.0090703010382933, | |
| "learning_rate": 5.258548704812667e-06, | |
| "loss": 0.3745, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.9700598802395209, | |
| "grad_norm": 1.0240983740197453, | |
| "learning_rate": 5.235062476295488e-06, | |
| "loss": 0.3577, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.9730538922155688, | |
| "grad_norm": 1.054207020462211, | |
| "learning_rate": 5.211571048664469e-06, | |
| "loss": 0.3121, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.9760479041916168, | |
| "grad_norm": 1.0418905854977334, | |
| "learning_rate": 5.188074941503203e-06, | |
| "loss": 0.3721, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.9790419161676647, | |
| "grad_norm": 1.0377401459754019, | |
| "learning_rate": 5.164574674498788e-06, | |
| "loss": 0.3648, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.9820359281437125, | |
| "grad_norm": 1.1323566998059742, | |
| "learning_rate": 5.141070767430331e-06, | |
| "loss": 0.3516, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.9850299401197605, | |
| "grad_norm": 1.0206982859117215, | |
| "learning_rate": 5.117563740157444e-06, | |
| "loss": 0.3494, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.9880239520958084, | |
| "grad_norm": 1.1305169158319033, | |
| "learning_rate": 5.094054112608758e-06, | |
| "loss": 0.4197, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.9910179640718563, | |
| "grad_norm": 0.9270024273081134, | |
| "learning_rate": 5.070542404770413e-06, | |
| "loss": 0.3946, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.9940119760479041, | |
| "grad_norm": 0.9487207215039185, | |
| "learning_rate": 5.047029136674563e-06, | |
| "loss": 0.3234, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.9970059880239521, | |
| "grad_norm": 1.1679059724655723, | |
| "learning_rate": 5.023514828387868e-06, | |
| "loss": 0.4811, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.963466772905833, | |
| "learning_rate": 5e-06, | |
| "loss": 0.2822, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.0029940119760479, | |
| "grad_norm": 1.1038163202178393, | |
| "learning_rate": 4.976485171612134e-06, | |
| "loss": 0.2402, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.0059880239520957, | |
| "grad_norm": 0.9439308178641818, | |
| "learning_rate": 4.95297086332544e-06, | |
| "loss": 0.2578, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.0089820359281436, | |
| "grad_norm": 1.0459901921231076, | |
| "learning_rate": 4.9294575952295896e-06, | |
| "loss": 0.274, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.0119760479041917, | |
| "grad_norm": 1.0752838501456172, | |
| "learning_rate": 4.905945887391242e-06, | |
| "loss": 0.3031, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.0149700598802396, | |
| "grad_norm": 1.0670794122757394, | |
| "learning_rate": 4.882436259842556e-06, | |
| "loss": 0.2774, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.0179640718562875, | |
| "grad_norm": 0.9674785854060131, | |
| "learning_rate": 4.858929232569671e-06, | |
| "loss": 0.2693, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.0209580838323353, | |
| "grad_norm": 1.0418787504336953, | |
| "learning_rate": 4.835425325501214e-06, | |
| "loss": 0.2867, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.0239520958083832, | |
| "grad_norm": 0.9273975514140991, | |
| "learning_rate": 4.811925058496799e-06, | |
| "loss": 0.2279, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.026946107784431, | |
| "grad_norm": 1.1214794184235446, | |
| "learning_rate": 4.788428951335534e-06, | |
| "loss": 0.2909, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.029940119760479, | |
| "grad_norm": 0.9056792166529178, | |
| "learning_rate": 4.7649375237045135e-06, | |
| "loss": 0.2628, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.032934131736527, | |
| "grad_norm": 0.9942035520971275, | |
| "learning_rate": 4.741451295187333e-06, | |
| "loss": 0.2406, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.035928143712575, | |
| "grad_norm": 0.968551910495657, | |
| "learning_rate": 4.717970785252596e-06, | |
| "loss": 0.3153, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.0389221556886228, | |
| "grad_norm": 0.959066511538171, | |
| "learning_rate": 4.694496513242418e-06, | |
| "loss": 0.302, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.0419161676646707, | |
| "grad_norm": 1.0651106382353495, | |
| "learning_rate": 4.671028998360947e-06, | |
| "loss": 0.2481, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.0449101796407185, | |
| "grad_norm": 1.0645658172847647, | |
| "learning_rate": 4.647568759662876e-06, | |
| "loss": 0.3041, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.0479041916167664, | |
| "grad_norm": 0.9641040212080574, | |
| "learning_rate": 4.624116316041962e-06, | |
| "loss": 0.2847, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.0508982035928143, | |
| "grad_norm": 0.9705453522757115, | |
| "learning_rate": 4.600672186219551e-06, | |
| "loss": 0.2926, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.0538922155688624, | |
| "grad_norm": 1.090750995748997, | |
| "learning_rate": 4.5772368887331044e-06, | |
| "loss": 0.329, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.0568862275449102, | |
| "grad_norm": 0.9860942834457607, | |
| "learning_rate": 4.553810941924735e-06, | |
| "loss": 0.3182, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.0598802395209581, | |
| "grad_norm": 0.9947516928313157, | |
| "learning_rate": 4.530394863929728e-06, | |
| "loss": 0.2805, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.062874251497006, | |
| "grad_norm": 1.0152103368161638, | |
| "learning_rate": 4.506989172665097e-06, | |
| "loss": 0.2683, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.0658682634730539, | |
| "grad_norm": 1.0671034757235769, | |
| "learning_rate": 4.483594385818119e-06, | |
| "loss": 0.2613, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.0688622754491017, | |
| "grad_norm": 1.1101237480576047, | |
| "learning_rate": 4.460211020834887e-06, | |
| "loss": 0.2776, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.0718562874251496, | |
| "grad_norm": 1.0926944083273815, | |
| "learning_rate": 4.436839594908866e-06, | |
| "loss": 0.3395, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.0748502994011977, | |
| "grad_norm": 1.162819526757365, | |
| "learning_rate": 4.4134806249694514e-06, | |
| "loss": 0.3096, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.0778443113772456, | |
| "grad_norm": 1.106813840488466, | |
| "learning_rate": 4.39013462767054e-06, | |
| "loss": 0.3285, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.0808383233532934, | |
| "grad_norm": 0.9835710232116306, | |
| "learning_rate": 4.366802119379098e-06, | |
| "loss": 0.225, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.0838323353293413, | |
| "grad_norm": 0.936604619916719, | |
| "learning_rate": 4.34348361616374e-06, | |
| "loss": 0.3305, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.0868263473053892, | |
| "grad_norm": 1.186118149852767, | |
| "learning_rate": 4.3201796337833165e-06, | |
| "loss": 0.3048, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.089820359281437, | |
| "grad_norm": 1.2077002584599066, | |
| "learning_rate": 4.29689068767551e-06, | |
| "loss": 0.333, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.092814371257485, | |
| "grad_norm": 0.9936218749411708, | |
| "learning_rate": 4.273617292945425e-06, | |
| "loss": 0.2776, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.095808383233533, | |
| "grad_norm": 0.9510483327426793, | |
| "learning_rate": 4.250359964354203e-06, | |
| "loss": 0.2886, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.098802395209581, | |
| "grad_norm": 1.0933620437452058, | |
| "learning_rate": 4.227119216307637e-06, | |
| "loss": 0.2919, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.1017964071856288, | |
| "grad_norm": 1.0497701233499976, | |
| "learning_rate": 4.203895562844789e-06, | |
| "loss": 0.3287, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1047904191616766, | |
| "grad_norm": 1.1246610130672805, | |
| "learning_rate": 4.18068951762662e-06, | |
| "loss": 0.3377, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.1077844311377245, | |
| "grad_norm": 1.0272251389183615, | |
| "learning_rate": 4.157501593924638e-06, | |
| "loss": 0.2609, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1107784431137724, | |
| "grad_norm": 0.9591853088905555, | |
| "learning_rate": 4.134332304609533e-06, | |
| "loss": 0.3029, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.1137724550898203, | |
| "grad_norm": 0.8559562239842542, | |
| "learning_rate": 4.111182162139844e-06, | |
| "loss": 0.2827, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1167664670658684, | |
| "grad_norm": 1.1089804658991524, | |
| "learning_rate": 4.088051678550617e-06, | |
| "loss": 0.2745, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.1197604790419162, | |
| "grad_norm": 1.0692427931482376, | |
| "learning_rate": 4.064941365442084e-06, | |
| "loss": 0.2911, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.122754491017964, | |
| "grad_norm": 1.2437802366455162, | |
| "learning_rate": 4.041851733968348e-06, | |
| "loss": 0.335, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.125748502994012, | |
| "grad_norm": 1.0566923905800043, | |
| "learning_rate": 4.018783294826071e-06, | |
| "loss": 0.3087, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.1287425149700598, | |
| "grad_norm": 1.123414045867036, | |
| "learning_rate": 3.995736558243186e-06, | |
| "loss": 0.3271, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.1317365269461077, | |
| "grad_norm": 1.0045033372465337, | |
| "learning_rate": 3.972712033967608e-06, | |
| "loss": 0.2893, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.1347305389221556, | |
| "grad_norm": 0.9903061130943368, | |
| "learning_rate": 3.949710231255961e-06, | |
| "loss": 0.3376, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.1377245508982037, | |
| "grad_norm": 1.2510349069206157, | |
| "learning_rate": 3.926731658862307e-06, | |
| "loss": 0.2889, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.1407185628742516, | |
| "grad_norm": 1.0038425254175143, | |
| "learning_rate": 3.903776825026912e-06, | |
| "loss": 0.3289, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.1437125748502994, | |
| "grad_norm": 0.9938234890357625, | |
| "learning_rate": 3.8808462374649805e-06, | |
| "loss": 0.2998, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.1467065868263473, | |
| "grad_norm": 1.0246750644660116, | |
| "learning_rate": 3.857940403355444e-06, | |
| "loss": 0.2662, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.1497005988023952, | |
| "grad_norm": 1.039803508748742, | |
| "learning_rate": 3.8350598293297345e-06, | |
| "loss": 0.3201, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.152694610778443, | |
| "grad_norm": 1.014959652124324, | |
| "learning_rate": 3.8122050214605822e-06, | |
| "loss": 0.2702, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.1556886227544911, | |
| "grad_norm": 0.9975291572056869, | |
| "learning_rate": 3.7893764852508207e-06, | |
| "loss": 0.3035, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.158682634730539, | |
| "grad_norm": 1.0199777385264022, | |
| "learning_rate": 3.766574725622208e-06, | |
| "loss": 0.2586, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.1616766467065869, | |
| "grad_norm": 0.9759448313629795, | |
| "learning_rate": 3.7438002469042567e-06, | |
| "loss": 0.289, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.1646706586826348, | |
| "grad_norm": 1.0765071106811648, | |
| "learning_rate": 3.721053552823078e-06, | |
| "loss": 0.2991, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.1676646706586826, | |
| "grad_norm": 1.0202142143948365, | |
| "learning_rate": 3.698335146490246e-06, | |
| "loss": 0.2855, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.1706586826347305, | |
| "grad_norm": 1.1582146793042545, | |
| "learning_rate": 3.675645530391665e-06, | |
| "loss": 0.2823, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.1736526946107784, | |
| "grad_norm": 1.0397077710100364, | |
| "learning_rate": 3.652985206376455e-06, | |
| "loss": 0.2614, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.1766467065868262, | |
| "grad_norm": 0.8459228285294651, | |
| "learning_rate": 3.630354675645853e-06, | |
| "loss": 0.2509, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.1796407185628743, | |
| "grad_norm": 1.074927652267696, | |
| "learning_rate": 3.6077544387421293e-06, | |
| "loss": 0.3075, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.1826347305389222, | |
| "grad_norm": 1.1066622312555507, | |
| "learning_rate": 3.5851849955375177e-06, | |
| "loss": 0.2545, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.18562874251497, | |
| "grad_norm": 1.0380594135380292, | |
| "learning_rate": 3.5626468452231534e-06, | |
| "loss": 0.2664, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.188622754491018, | |
| "grad_norm": 1.0350740479215617, | |
| "learning_rate": 3.540140486298035e-06, | |
| "loss": 0.2239, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.1916167664670658, | |
| "grad_norm": 0.9671972197294936, | |
| "learning_rate": 3.517666416557999e-06, | |
| "loss": 0.323, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.1946107784431137, | |
| "grad_norm": 1.0553578297585076, | |
| "learning_rate": 3.495225133084712e-06, | |
| "loss": 0.2942, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.1976047904191618, | |
| "grad_norm": 0.9668856112058133, | |
| "learning_rate": 3.472817132234669e-06, | |
| "loss": 0.3097, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.1976047904191618, | |
| "eval_loss": 0.3693498969078064, | |
| "eval_runtime": 2.0745, | |
| "eval_samples_per_second": 26.03, | |
| "eval_steps_per_second": 6.749, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.2005988023952097, | |
| "grad_norm": 0.9818768718871981, | |
| "learning_rate": 3.4504429096282246e-06, | |
| "loss": 0.3246, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2035928143712575, | |
| "grad_norm": 1.056124662708042, | |
| "learning_rate": 3.428102960138625e-06, | |
| "loss": 0.3118, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2065868263473054, | |
| "grad_norm": 1.085430749955511, | |
| "learning_rate": 3.405797777881059e-06, | |
| "loss": 0.3247, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.2095808383233533, | |
| "grad_norm": 1.109869803902745, | |
| "learning_rate": 3.3835278562017405e-06, | |
| "loss": 0.2679, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.2125748502994012, | |
| "grad_norm": 0.9653457634585856, | |
| "learning_rate": 3.3612936876669834e-06, | |
| "loss": 0.2724, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.215568862275449, | |
| "grad_norm": 0.9138963278075037, | |
| "learning_rate": 3.3390957640523147e-06, | |
| "loss": 0.2675, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.218562874251497, | |
| "grad_norm": 0.9328413785625903, | |
| "learning_rate": 3.3169345763315986e-06, | |
| "loss": 0.2853, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.221556886227545, | |
| "grad_norm": 1.0516384087001678, | |
| "learning_rate": 3.29481061466617e-06, | |
| "loss": 0.3054, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.2245508982035929, | |
| "grad_norm": 1.1253453724856757, | |
| "learning_rate": 3.2727243683940045e-06, | |
| "loss": 0.2819, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.2275449101796407, | |
| "grad_norm": 1.0571410903586207, | |
| "learning_rate": 3.2506763260188824e-06, | |
| "loss": 0.3098, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.2305389221556886, | |
| "grad_norm": 0.9251864173640302, | |
| "learning_rate": 3.2286669751995905e-06, | |
| "loss": 0.2918, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.2335329341317365, | |
| "grad_norm": 1.1343137223136206, | |
| "learning_rate": 3.2066968027391377e-06, | |
| "loss": 0.2913, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.2365269461077844, | |
| "grad_norm": 0.9482192781318599, | |
| "learning_rate": 3.1847662945739833e-06, | |
| "loss": 0.2809, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.2395209580838324, | |
| "grad_norm": 1.0489394179575684, | |
| "learning_rate": 3.1628759357632943e-06, | |
| "loss": 0.307, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.2425149700598803, | |
| "grad_norm": 1.1995655945447297, | |
| "learning_rate": 3.1410262104782086e-06, | |
| "loss": 0.2633, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.2455089820359282, | |
| "grad_norm": 1.0965588624520188, | |
| "learning_rate": 3.119217601991139e-06, | |
| "loss": 0.2885, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.248502994011976, | |
| "grad_norm": 1.0978620772266037, | |
| "learning_rate": 3.0974505926650724e-06, | |
| "loss": 0.2449, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.251497005988024, | |
| "grad_norm": 0.9566279299243072, | |
| "learning_rate": 3.0757256639429027e-06, | |
| "loss": 0.2785, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.2544910179640718, | |
| "grad_norm": 1.080252236964104, | |
| "learning_rate": 3.0540432963367907e-06, | |
| "loss": 0.3217, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.2574850299401197, | |
| "grad_norm": 1.063971060965149, | |
| "learning_rate": 3.032403969417523e-06, | |
| "loss": 0.2812, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.2604790419161676, | |
| "grad_norm": 0.9704172296003442, | |
| "learning_rate": 3.010808161803917e-06, | |
| "loss": 0.267, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.2634730538922156, | |
| "grad_norm": 0.9939978410887771, | |
| "learning_rate": 2.9892563511522305e-06, | |
| "loss": 0.3196, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.2664670658682635, | |
| "grad_norm": 1.0217305858381498, | |
| "learning_rate": 2.9677490141455915e-06, | |
| "loss": 0.2686, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.2694610778443114, | |
| "grad_norm": 1.0077762369146446, | |
| "learning_rate": 2.946286626483463e-06, | |
| "loss": 0.321, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.2724550898203593, | |
| "grad_norm": 1.2045671843348047, | |
| "learning_rate": 2.924869662871117e-06, | |
| "loss": 0.2911, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.2754491017964071, | |
| "grad_norm": 1.0330676293641845, | |
| "learning_rate": 2.903498597009136e-06, | |
| "loss": 0.2989, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.278443113772455, | |
| "grad_norm": 1.116401826098273, | |
| "learning_rate": 2.8821739015829338e-06, | |
| "loss": 0.2991, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.281437125748503, | |
| "grad_norm": 1.0183731095003274, | |
| "learning_rate": 2.8608960482523058e-06, | |
| "loss": 0.285, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.284431137724551, | |
| "grad_norm": 1.0146098027998318, | |
| "learning_rate": 2.839665507640992e-06, | |
| "loss": 0.2796, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.2874251497005988, | |
| "grad_norm": 0.9389899747366528, | |
| "learning_rate": 2.818482749326272e-06, | |
| "loss": 0.3354, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.2904191616766467, | |
| "grad_norm": 0.9334020290971811, | |
| "learning_rate": 2.797348241828569e-06, | |
| "loss": 0.2898, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.2934131736526946, | |
| "grad_norm": 0.9367010903945576, | |
| "learning_rate": 2.776262452601104e-06, | |
| "loss": 0.3172, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.2964071856287425, | |
| "grad_norm": 1.0293549689092885, | |
| "learning_rate": 2.7552258480195348e-06, | |
| "loss": 0.2794, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.2994011976047903, | |
| "grad_norm": 0.9240368132541306, | |
| "learning_rate": 2.734238893371667e-06, | |
| "loss": 0.2701, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.3023952095808382, | |
| "grad_norm": 0.9906999330027314, | |
| "learning_rate": 2.7133020528471322e-06, | |
| "loss": 0.2809, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.3053892215568863, | |
| "grad_norm": 0.9744488517144564, | |
| "learning_rate": 2.6924157895271563e-06, | |
| "loss": 0.293, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3083832335329342, | |
| "grad_norm": 1.0033858656719574, | |
| "learning_rate": 2.671580565374282e-06, | |
| "loss": 0.3217, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.311377245508982, | |
| "grad_norm": 1.011600307309713, | |
| "learning_rate": 2.6507968412221763e-06, | |
| "loss": 0.2422, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.31437125748503, | |
| "grad_norm": 1.11189134066282, | |
| "learning_rate": 2.6300650767654234e-06, | |
| "loss": 0.2744, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.3173652694610778, | |
| "grad_norm": 0.9469018716498592, | |
| "learning_rate": 2.6093857305493666e-06, | |
| "loss": 0.2858, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.3203592814371259, | |
| "grad_norm": 0.9803118925621244, | |
| "learning_rate": 2.588759259959962e-06, | |
| "loss": 0.2624, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.3233532934131738, | |
| "grad_norm": 1.0239619282343333, | |
| "learning_rate": 2.568186121213658e-06, | |
| "loss": 0.3087, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.3263473053892216, | |
| "grad_norm": 1.0337294346895696, | |
| "learning_rate": 2.547666769347312e-06, | |
| "loss": 0.2979, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.3293413173652695, | |
| "grad_norm": 0.9690903080271954, | |
| "learning_rate": 2.5272016582081236e-06, | |
| "loss": 0.353, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.3323353293413174, | |
| "grad_norm": 1.0917776679498428, | |
| "learning_rate": 2.5067912404435952e-06, | |
| "loss": 0.3154, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.3353293413173652, | |
| "grad_norm": 1.0341877452666952, | |
| "learning_rate": 2.486435967491516e-06, | |
| "loss": 0.2781, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.3383233532934131, | |
| "grad_norm": 1.0088471355077344, | |
| "learning_rate": 2.4661362895699903e-06, | |
| "loss": 0.2732, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.341317365269461, | |
| "grad_norm": 1.0429412895760934, | |
| "learning_rate": 2.445892655667462e-06, | |
| "loss": 0.3666, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.3443113772455089, | |
| "grad_norm": 1.1443662715754788, | |
| "learning_rate": 2.425705513532798e-06, | |
| "loss": 0.3141, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.347305389221557, | |
| "grad_norm": 0.999254604061352, | |
| "learning_rate": 2.4055753096653795e-06, | |
| "loss": 0.3234, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.3502994011976048, | |
| "grad_norm": 0.9554262386411405, | |
| "learning_rate": 2.3855024893052286e-06, | |
| "loss": 0.2588, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.3532934131736527, | |
| "grad_norm": 1.0092424482732858, | |
| "learning_rate": 2.365487496423152e-06, | |
| "loss": 0.2867, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.3562874251497006, | |
| "grad_norm": 0.8824405698753732, | |
| "learning_rate": 2.3455307737109338e-06, | |
| "loss": 0.2418, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.3592814371257484, | |
| "grad_norm": 0.9269365911693727, | |
| "learning_rate": 2.3256327625715345e-06, | |
| "loss": 0.2406, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.3622754491017965, | |
| "grad_norm": 1.0683071236638744, | |
| "learning_rate": 2.3057939031093346e-06, | |
| "loss": 0.2765, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.3652694610778444, | |
| "grad_norm": 0.9094068896182096, | |
| "learning_rate": 2.2860146341203936e-06, | |
| "loss": 0.2669, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.3682634730538923, | |
| "grad_norm": 1.0430179524496415, | |
| "learning_rate": 2.2662953930827546e-06, | |
| "loss": 0.3522, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.3712574850299402, | |
| "grad_norm": 1.0967254686382057, | |
| "learning_rate": 2.2466366161467528e-06, | |
| "loss": 0.2885, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.374251497005988, | |
| "grad_norm": 1.113655664070299, | |
| "learning_rate": 2.227038738125385e-06, | |
| "loss": 0.2621, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.377245508982036, | |
| "grad_norm": 1.1947268702376181, | |
| "learning_rate": 2.207502192484685e-06, | |
| "loss": 0.28, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.3802395209580838, | |
| "grad_norm": 0.928199271967082, | |
| "learning_rate": 2.188027411334131e-06, | |
| "loss": 0.2767, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.3832335329341316, | |
| "grad_norm": 1.0741630054105522, | |
| "learning_rate": 2.1686148254171012e-06, | |
| "loss": 0.2768, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.3862275449101795, | |
| "grad_norm": 1.186068232666103, | |
| "learning_rate": 2.1492648641013305e-06, | |
| "loss": 0.2665, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.3892215568862276, | |
| "grad_norm": 0.9210593951202511, | |
| "learning_rate": 2.1299779553694323e-06, | |
| "loss": 0.2659, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.3922155688622755, | |
| "grad_norm": 1.1044867591546301, | |
| "learning_rate": 2.1107545258094135e-06, | |
| "loss": 0.2803, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.3952095808383234, | |
| "grad_norm": 1.031573990487462, | |
| "learning_rate": 2.0915950006052555e-06, | |
| "loss": 0.233, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.3982035928143712, | |
| "grad_norm": 0.9655158836794998, | |
| "learning_rate": 2.0724998035274947e-06, | |
| "loss": 0.2764, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.401197604790419, | |
| "grad_norm": 1.0172003587953486, | |
| "learning_rate": 2.053469356923865e-06, | |
| "loss": 0.229, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4041916167664672, | |
| "grad_norm": 1.0067623372218106, | |
| "learning_rate": 2.0345040817099433e-06, | |
| "loss": 0.2907, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.407185628742515, | |
| "grad_norm": 0.9480842454728966, | |
| "learning_rate": 2.0156043973598475e-06, | |
| "loss": 0.2587, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.410179640718563, | |
| "grad_norm": 1.1560079443177578, | |
| "learning_rate": 1.996770721896957e-06, | |
| "loss": 0.2935, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.4131736526946108, | |
| "grad_norm": 1.1309043619436994, | |
| "learning_rate": 1.9780034718846653e-06, | |
| "loss": 0.2769, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.4161676646706587, | |
| "grad_norm": 0.9960471449258698, | |
| "learning_rate": 1.9593030624171683e-06, | |
| "loss": 0.3077, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.4191616766467066, | |
| "grad_norm": 1.1157076766455758, | |
| "learning_rate": 1.9406699071102774e-06, | |
| "loss": 0.3164, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.4221556886227544, | |
| "grad_norm": 0.9781452758812129, | |
| "learning_rate": 1.9221044180922833e-06, | |
| "loss": 0.313, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.4251497005988023, | |
| "grad_norm": 1.0510296364291654, | |
| "learning_rate": 1.9036070059948253e-06, | |
| "loss": 0.2925, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.4281437125748502, | |
| "grad_norm": 1.1121349316079836, | |
| "learning_rate": 1.885178079943823e-06, | |
| "loss": 0.3117, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.4311377245508983, | |
| "grad_norm": 1.0708487444353973, | |
| "learning_rate": 1.866818047550419e-06, | |
| "loss": 0.3608, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.4341317365269461, | |
| "grad_norm": 0.9472532758442945, | |
| "learning_rate": 1.8485273149019655e-06, | |
| "loss": 0.2726, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.437125748502994, | |
| "grad_norm": 1.1274922482499445, | |
| "learning_rate": 1.8303062865530407e-06, | |
| "loss": 0.255, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.4401197604790419, | |
| "grad_norm": 1.089568199658486, | |
| "learning_rate": 1.8121553655165058e-06, | |
| "loss": 0.2699, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.4431137724550898, | |
| "grad_norm": 1.0546253995538688, | |
| "learning_rate": 1.7940749532545832e-06, | |
| "loss": 0.326, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.4461077844311379, | |
| "grad_norm": 1.2264137743297507, | |
| "learning_rate": 1.7760654496699876e-06, | |
| "loss": 0.282, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.4491017964071857, | |
| "grad_norm": 1.0060274414788548, | |
| "learning_rate": 1.7581272530970666e-06, | |
| "loss": 0.2983, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.4520958083832336, | |
| "grad_norm": 0.9619347223999136, | |
| "learning_rate": 1.7402607602930106e-06, | |
| "loss": 0.28, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.4550898203592815, | |
| "grad_norm": 1.1355226571102557, | |
| "learning_rate": 1.7224663664290537e-06, | |
| "loss": 0.3382, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.4580838323353293, | |
| "grad_norm": 1.0086173130977534, | |
| "learning_rate": 1.7047444650817518e-06, | |
| "loss": 0.2872, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.4610778443113772, | |
| "grad_norm": 1.1127648516229565, | |
| "learning_rate": 1.6870954482242707e-06, | |
| "loss": 0.2724, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.464071856287425, | |
| "grad_norm": 0.9832310826065905, | |
| "learning_rate": 1.669519706217711e-06, | |
| "loss": 0.2437, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.467065868263473, | |
| "grad_norm": 1.0622879657436979, | |
| "learning_rate": 1.652017627802487e-06, | |
| "loss": 0.3091, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.4700598802395208, | |
| "grad_norm": 0.9386687663863547, | |
| "learning_rate": 1.6345896000897122e-06, | |
| "loss": 0.2635, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.473053892215569, | |
| "grad_norm": 0.897937240725058, | |
| "learning_rate": 1.6172360085526567e-06, | |
| "loss": 0.2986, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.4760479041916168, | |
| "grad_norm": 0.9020970971560415, | |
| "learning_rate": 1.5999572370182016e-06, | |
| "loss": 0.2997, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.4790419161676647, | |
| "grad_norm": 1.0441938176698309, | |
| "learning_rate": 1.5827536676583643e-06, | |
| "loss": 0.2974, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.4820359281437125, | |
| "grad_norm": 1.0375052996463812, | |
| "learning_rate": 1.5656256809818343e-06, | |
| "loss": 0.2815, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.4850299401197604, | |
| "grad_norm": 1.1276079811444029, | |
| "learning_rate": 1.54857365582557e-06, | |
| "loss": 0.2529, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.4880239520958085, | |
| "grad_norm": 1.0318039132846863, | |
| "learning_rate": 1.5315979693464039e-06, | |
| "loss": 0.2983, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.4910179640718564, | |
| "grad_norm": 1.153390011507833, | |
| "learning_rate": 1.5146989970127158e-06, | |
| "loss": 0.2511, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.4940119760479043, | |
| "grad_norm": 1.0532180938840872, | |
| "learning_rate": 1.4978771125961177e-06, | |
| "loss": 0.2952, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.4970059880239521, | |
| "grad_norm": 0.9281788535505504, | |
| "learning_rate": 1.4811326881631937e-06, | |
| "loss": 0.2927, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.9857787780501807, | |
| "learning_rate": 1.4644660940672628e-06, | |
| "loss": 0.2885, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.5029940119760479, | |
| "grad_norm": 1.0408667450292723, | |
| "learning_rate": 1.4478776989401949e-06, | |
| "loss": 0.3037, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.5059880239520957, | |
| "grad_norm": 1.0816927299696908, | |
| "learning_rate": 1.4313678696842559e-06, | |
| "loss": 0.2804, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.5089820359281436, | |
| "grad_norm": 1.0188952138113099, | |
| "learning_rate": 1.4149369714639856e-06, | |
| "loss": 0.2801, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.5119760479041915, | |
| "grad_norm": 1.0326848434661613, | |
| "learning_rate": 1.3985853676981316e-06, | |
| "loss": 0.289, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.5149700598802394, | |
| "grad_norm": 1.029640567846141, | |
| "learning_rate": 1.3823134200516043e-06, | |
| "loss": 0.2796, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.5179640718562875, | |
| "grad_norm": 1.1331584019397833, | |
| "learning_rate": 1.366121488427481e-06, | |
| "loss": 0.2752, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.5209580838323353, | |
| "grad_norm": 1.1161301605607417, | |
| "learning_rate": 1.3500099309590397e-06, | |
| "loss": 0.2967, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.5239520958083832, | |
| "grad_norm": 1.0801243931631266, | |
| "learning_rate": 1.3339791040018479e-06, | |
| "loss": 0.3321, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.5269461077844313, | |
| "grad_norm": 0.9588033298979124, | |
| "learning_rate": 1.3180293621258694e-06, | |
| "loss": 0.2847, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.5299401197604792, | |
| "grad_norm": 1.0714461729400162, | |
| "learning_rate": 1.3021610581076316e-06, | |
| "loss": 0.2462, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.532934131736527, | |
| "grad_norm": 1.1293384420310908, | |
| "learning_rate": 1.2863745429224145e-06, | |
| "loss": 0.285, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.535928143712575, | |
| "grad_norm": 0.9787160982168177, | |
| "learning_rate": 1.270670165736499e-06, | |
| "loss": 0.2895, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.5389221556886228, | |
| "grad_norm": 1.2138400284810715, | |
| "learning_rate": 1.2550482738994284e-06, | |
| "loss": 0.3481, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.5419161676646707, | |
| "grad_norm": 1.0577605935604286, | |
| "learning_rate": 1.239509212936343e-06, | |
| "loss": 0.2876, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.5449101796407185, | |
| "grad_norm": 1.0634837772496648, | |
| "learning_rate": 1.22405332654032e-06, | |
| "loss": 0.2729, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.5479041916167664, | |
| "grad_norm": 0.9020595183913013, | |
| "learning_rate": 1.2086809565647877e-06, | |
| "loss": 0.2677, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.5508982035928143, | |
| "grad_norm": 1.2959297717360532, | |
| "learning_rate": 1.1933924430159571e-06, | |
| "loss": 0.2792, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.5538922155688621, | |
| "grad_norm": 1.0110483516177153, | |
| "learning_rate": 1.1781881240452958e-06, | |
| "loss": 0.2822, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.55688622754491, | |
| "grad_norm": 1.0446390155714294, | |
| "learning_rate": 1.1630683359420653e-06, | |
| "loss": 0.2935, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.5598802395209581, | |
| "grad_norm": 1.0690986124227901, | |
| "learning_rate": 1.1480334131258626e-06, | |
| "loss": 0.2289, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.562874251497006, | |
| "grad_norm": 1.1430403868252486, | |
| "learning_rate": 1.1330836881392405e-06, | |
| "loss": 0.2718, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.5658682634730539, | |
| "grad_norm": 1.0375537372951282, | |
| "learning_rate": 1.11821949164034e-06, | |
| "loss": 0.297, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.568862275449102, | |
| "grad_norm": 1.176974027653935, | |
| "learning_rate": 1.103441152395588e-06, | |
| "loss": 0.3502, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.5718562874251498, | |
| "grad_norm": 1.0696722027143732, | |
| "learning_rate": 1.088748997272414e-06, | |
| "loss": 0.2993, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.5748502994011977, | |
| "grad_norm": 1.0346499714868336, | |
| "learning_rate": 1.0741433512320316e-06, | |
| "loss": 0.2329, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.5778443113772456, | |
| "grad_norm": 1.1517848564229314, | |
| "learning_rate": 1.0596245373222424e-06, | |
| "loss": 0.2675, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.5808383233532934, | |
| "grad_norm": 1.0284607566075672, | |
| "learning_rate": 1.045192876670298e-06, | |
| "loss": 0.3123, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.5838323353293413, | |
| "grad_norm": 1.011579712301013, | |
| "learning_rate": 1.0308486884757868e-06, | |
| "loss": 0.3192, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.5868263473053892, | |
| "grad_norm": 0.9498420839416503, | |
| "learning_rate": 1.0165922900035886e-06, | |
| "loss": 0.2904, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.589820359281437, | |
| "grad_norm": 0.9815837486238403, | |
| "learning_rate": 1.0024239965768417e-06, | |
| "loss": 0.2808, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.592814371257485, | |
| "grad_norm": 1.0522449608275906, | |
| "learning_rate": 9.883441215699824e-07, | |
| "loss": 0.2142, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.5958083832335328, | |
| "grad_norm": 1.2241523212020757, | |
| "learning_rate": 9.74352976401805e-07, | |
| "loss": 0.3493, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.5988023952095807, | |
| "grad_norm": 1.1753755980088134, | |
| "learning_rate": 9.604508705285765e-07, | |
| "loss": 0.2939, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.6017964071856288, | |
| "grad_norm": 1.0826047408827566, | |
| "learning_rate": 9.466381114371942e-07, | |
| "loss": 0.2975, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.6047904191616766, | |
| "grad_norm": 1.0317977756518482, | |
| "learning_rate": 9.329150046383773e-07, | |
| "loss": 0.2789, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.6077844311377245, | |
| "grad_norm": 1.0904599792581928, | |
| "learning_rate": 9.192818536599213e-07, | |
| "loss": 0.2923, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.6107784431137726, | |
| "grad_norm": 1.0215977387126998, | |
| "learning_rate": 9.057389600399719e-07, | |
| "loss": 0.2997, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.6137724550898205, | |
| "grad_norm": 0.976235862219578, | |
| "learning_rate": 8.922866233203681e-07, | |
| "loss": 0.2695, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.6167664670658684, | |
| "grad_norm": 0.8292764935570617, | |
| "learning_rate": 8.789251410400024e-07, | |
| "loss": 0.223, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.6197604790419162, | |
| "grad_norm": 0.8910398864974575, | |
| "learning_rate": 8.65654808728259e-07, | |
| "loss": 0.2765, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.622754491017964, | |
| "grad_norm": 1.2447041601300974, | |
| "learning_rate": 8.524759198984567e-07, | |
| "loss": 0.254, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.625748502994012, | |
| "grad_norm": 1.0180965706579113, | |
| "learning_rate": 8.393887660413719e-07, | |
| "loss": 0.2857, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.6287425149700598, | |
| "grad_norm": 1.255705210510179, | |
| "learning_rate": 8.263936366187825e-07, | |
| "loss": 0.3466, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.6317365269461077, | |
| "grad_norm": 1.0150198301967028, | |
| "learning_rate": 8.134908190570723e-07, | |
| "loss": 0.3045, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.6347305389221556, | |
| "grad_norm": 0.890330932939154, | |
| "learning_rate": 8.006805987408705e-07, | |
| "loss": 0.2097, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.6377245508982035, | |
| "grad_norm": 1.0598977821495503, | |
| "learning_rate": 7.879632590067354e-07, | |
| "loss": 0.2945, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.6407185628742516, | |
| "grad_norm": 1.1140121729115857, | |
| "learning_rate": 7.753390811368972e-07, | |
| "loss": 0.3282, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.6437125748502994, | |
| "grad_norm": 1.0160729111471394, | |
| "learning_rate": 7.628083443530287e-07, | |
| "loss": 0.2808, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.6467065868263473, | |
| "grad_norm": 1.1725275613210664, | |
| "learning_rate": 7.503713258100726e-07, | |
| "loss": 0.3203, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.6497005988023952, | |
| "grad_norm": 0.9756846296384402, | |
| "learning_rate": 7.380283005901084e-07, | |
| "loss": 0.2892, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.6526946107784433, | |
| "grad_norm": 0.9589031538582429, | |
| "learning_rate": 7.257795416962754e-07, | |
| "loss": 0.2606, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.6556886227544911, | |
| "grad_norm": 1.0260127763890292, | |
| "learning_rate": 7.136253200467231e-07, | |
| "loss": 0.2848, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.658682634730539, | |
| "grad_norm": 1.0638338266318648, | |
| "learning_rate": 7.015659044686307e-07, | |
| "loss": 0.2764, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.6616766467065869, | |
| "grad_norm": 1.034756665824474, | |
| "learning_rate": 6.896015616922535e-07, | |
| "loss": 0.2862, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.6646706586826348, | |
| "grad_norm": 0.9517593679522471, | |
| "learning_rate": 6.777325563450282e-07, | |
| "loss": 0.3227, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.6676646706586826, | |
| "grad_norm": 1.0087476237195276, | |
| "learning_rate": 6.659591509457125e-07, | |
| "loss": 0.277, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.6706586826347305, | |
| "grad_norm": 1.1419540564400483, | |
| "learning_rate": 6.542816058985896e-07, | |
| "loss": 0.2985, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.6736526946107784, | |
| "grad_norm": 1.0948747684546611, | |
| "learning_rate": 6.427001794876974e-07, | |
| "loss": 0.2833, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.6766467065868262, | |
| "grad_norm": 0.9360608941611567, | |
| "learning_rate": 6.312151278711237e-07, | |
| "loss": 0.3004, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.6796407185628741, | |
| "grad_norm": 0.939868141128382, | |
| "learning_rate": 6.198267050753387e-07, | |
| "loss": 0.2807, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.6826347305389222, | |
| "grad_norm": 1.0266935563707245, | |
| "learning_rate": 6.085351629895736e-07, | |
| "loss": 0.2759, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.68562874251497, | |
| "grad_norm": 1.1380332770043855, | |
| "learning_rate": 5.973407513602514e-07, | |
| "loss": 0.2602, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.688622754491018, | |
| "grad_norm": 1.0519438463503428, | |
| "learning_rate": 5.862437177854629e-07, | |
| "loss": 0.3035, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.6916167664670658, | |
| "grad_norm": 1.2169465575295926, | |
| "learning_rate": 5.752443077094927e-07, | |
| "loss": 0.2527, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.694610778443114, | |
| "grad_norm": 1.00460920180784, | |
| "learning_rate": 5.643427644173838e-07, | |
| "loss": 0.2883, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.6976047904191618, | |
| "grad_norm": 0.9166236516976071, | |
| "learning_rate": 5.535393290295643e-07, | |
| "loss": 0.2648, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.7005988023952097, | |
| "grad_norm": 1.0224218447371933, | |
| "learning_rate": 5.428342404965076e-07, | |
| "loss": 0.2532, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.7035928143712575, | |
| "grad_norm": 1.0827482075251529, | |
| "learning_rate": 5.322277355934557e-07, | |
| "loss": 0.324, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.7065868263473054, | |
| "grad_norm": 0.9714111449515691, | |
| "learning_rate": 5.217200489151714e-07, | |
| "loss": 0.285, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.7095808383233533, | |
| "grad_norm": 0.9364115965660317, | |
| "learning_rate": 5.113114128707592e-07, | |
| "loss": 0.2353, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.7125748502994012, | |
| "grad_norm": 1.0763449904071511, | |
| "learning_rate": 5.010020576785174e-07, | |
| "loss": 0.3051, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.715568862275449, | |
| "grad_norm": 0.969300404999829, | |
| "learning_rate": 4.907922113608532e-07, | |
| "loss": 0.2383, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.718562874251497, | |
| "grad_norm": 1.0519712300939217, | |
| "learning_rate": 4.806820997392325e-07, | |
| "loss": 0.2826, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.7215568862275448, | |
| "grad_norm": 0.9558157603046407, | |
| "learning_rate": 4.7067194642919036e-07, | |
| "loss": 0.282, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.7245508982035929, | |
| "grad_norm": 1.1085334878885595, | |
| "learning_rate": 4.607619728353818e-07, | |
| "loss": 0.2681, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.7275449101796407, | |
| "grad_norm": 0.9267873461117612, | |
| "learning_rate": 4.50952398146689e-07, | |
| "loss": 0.2772, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.7305389221556886, | |
| "grad_norm": 1.0012895885975834, | |
| "learning_rate": 4.4124343933136525e-07, | |
| "loss": 0.2745, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.7335329341317365, | |
| "grad_norm": 0.9412302789794276, | |
| "learning_rate": 4.3163531113224466e-07, | |
| "loss": 0.2533, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.7365269461077846, | |
| "grad_norm": 0.9914749596703888, | |
| "learning_rate": 4.221282260619891e-07, | |
| "loss": 0.2932, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.7395209580838324, | |
| "grad_norm": 0.9273325569543667, | |
| "learning_rate": 4.127223943983849e-07, | |
| "loss": 0.2623, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.7425149700598803, | |
| "grad_norm": 0.974899200106077, | |
| "learning_rate": 4.03418024179697e-07, | |
| "loss": 0.2599, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.7455089820359282, | |
| "grad_norm": 0.9661779192860693, | |
| "learning_rate": 3.9421532120006544e-07, | |
| "loss": 0.2561, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.748502994011976, | |
| "grad_norm": 0.9556330265930582, | |
| "learning_rate": 3.851144890049535e-07, | |
| "loss": 0.2704, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.751497005988024, | |
| "grad_norm": 1.05867747298971, | |
| "learning_rate": 3.761157288866418e-07, | |
| "loss": 0.3349, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.7544910179640718, | |
| "grad_norm": 1.0013296746121176, | |
| "learning_rate": 3.672192398797858e-07, | |
| "loss": 0.2754, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.7574850299401197, | |
| "grad_norm": 1.0703705550045968, | |
| "learning_rate": 3.58425218757002e-07, | |
| "loss": 0.2727, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.7604790419161676, | |
| "grad_norm": 1.0890885790400893, | |
| "learning_rate": 3.497338600245254e-07, | |
| "loss": 0.2705, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.7634730538922154, | |
| "grad_norm": 0.9741753118003843, | |
| "learning_rate": 3.4114535591790233e-07, | |
| "loss": 0.2515, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.7664670658682635, | |
| "grad_norm": 1.0944452295877847, | |
| "learning_rate": 3.326598963977395e-07, | |
| "loss": 0.2589, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.7694610778443114, | |
| "grad_norm": 1.0275735990331376, | |
| "learning_rate": 3.242776691455013e-07, | |
| "loss": 0.2982, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.7724550898203593, | |
| "grad_norm": 0.8420726703593355, | |
| "learning_rate": 3.159988595593616e-07, | |
| "loss": 0.2455, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.7754491017964071, | |
| "grad_norm": 0.9593924117616909, | |
| "learning_rate": 3.078236507501015e-07, | |
| "loss": 0.2408, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.7784431137724552, | |
| "grad_norm": 0.9947122425930526, | |
| "learning_rate": 2.9975222353705757e-07, | |
| "loss": 0.3006, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.781437125748503, | |
| "grad_norm": 0.9962935158980532, | |
| "learning_rate": 2.917847564441256e-07, | |
| "loss": 0.3145, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.784431137724551, | |
| "grad_norm": 1.0508672303220938, | |
| "learning_rate": 2.839214256958106e-07, | |
| "loss": 0.2811, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.7874251497005988, | |
| "grad_norm": 1.0397839051667999, | |
| "learning_rate": 2.7616240521332884e-07, | |
| "loss": 0.2657, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.7904191616766467, | |
| "grad_norm": 0.9300479476583533, | |
| "learning_rate": 2.6850786661076047e-07, | |
| "loss": 0.2821, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.7934131736526946, | |
| "grad_norm": 0.9935404720291086, | |
| "learning_rate": 2.6095797919125533e-07, | |
| "loss": 0.2629, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.7964071856287425, | |
| "grad_norm": 0.9704851413753994, | |
| "learning_rate": 2.5351290994328703e-07, | |
| "loss": 0.3017, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.7964071856287425, | |
| "eval_loss": 0.36320582032203674, | |
| "eval_runtime": 2.076, | |
| "eval_samples_per_second": 26.011, | |
| "eval_steps_per_second": 6.744, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.7994011976047903, | |
| "grad_norm": 0.923994696783988, | |
| "learning_rate": 2.4617282353696093e-07, | |
| "loss": 0.2525, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.8023952095808382, | |
| "grad_norm": 1.1099311606300768, | |
| "learning_rate": 2.3893788232036807e-07, | |
| "loss": 0.3488, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.805389221556886, | |
| "grad_norm": 0.9986226652634611, | |
| "learning_rate": 2.318082463160032e-07, | |
| "loss": 0.3128, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.8083832335329342, | |
| "grad_norm": 0.8792823539479557, | |
| "learning_rate": 2.2478407321721295e-07, | |
| "loss": 0.266, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.811377245508982, | |
| "grad_norm": 0.9246721910855287, | |
| "learning_rate": 2.1786551838471892e-07, | |
| "loss": 0.272, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.81437125748503, | |
| "grad_norm": 1.1421491657235932, | |
| "learning_rate": 2.1105273484317402e-07, | |
| "loss": 0.2355, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.8173652694610778, | |
| "grad_norm": 0.9715050475323485, | |
| "learning_rate": 2.043458732777831e-07, | |
| "loss": 0.2502, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.8203592814371259, | |
| "grad_norm": 1.0994232131890003, | |
| "learning_rate": 1.9774508203096843e-07, | |
| "loss": 0.2886, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.8233532934131738, | |
| "grad_norm": 0.93613158163084, | |
| "learning_rate": 1.9125050709908388e-07, | |
| "loss": 0.2457, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.8263473053892216, | |
| "grad_norm": 1.0329293204932921, | |
| "learning_rate": 1.8486229212919482e-07, | |
| "loss": 0.2926, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.8293413173652695, | |
| "grad_norm": 0.9341728455541087, | |
| "learning_rate": 1.7858057841589281e-07, | |
| "loss": 0.276, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.8323353293413174, | |
| "grad_norm": 1.1485004134731334, | |
| "learning_rate": 1.7240550489817652e-07, | |
| "loss": 0.2992, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.8353293413173652, | |
| "grad_norm": 1.182138384430407, | |
| "learning_rate": 1.66337208156373e-07, | |
| "loss": 0.2679, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.8383233532934131, | |
| "grad_norm": 1.0385308885451339, | |
| "learning_rate": 1.6037582240912175e-07, | |
| "loss": 0.2922, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.841317365269461, | |
| "grad_norm": 0.9874804114145909, | |
| "learning_rate": 1.5452147951040165e-07, | |
| "loss": 0.2849, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.8443113772455089, | |
| "grad_norm": 1.1832916798888142, | |
| "learning_rate": 1.4877430894662037e-07, | |
| "loss": 0.3274, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.8473053892215567, | |
| "grad_norm": 1.0373093637606001, | |
| "learning_rate": 1.4313443783374405e-07, | |
| "loss": 0.2607, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.8502994011976048, | |
| "grad_norm": 0.9414788336196014, | |
| "learning_rate": 1.3760199091449045e-07, | |
| "loss": 0.2399, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.8532934131736527, | |
| "grad_norm": 0.9212450702922258, | |
| "learning_rate": 1.3217709055556638e-07, | |
| "loss": 0.24, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.8562874251497006, | |
| "grad_norm": 0.9843751539995954, | |
| "learning_rate": 1.268598567449647e-07, | |
| "loss": 0.2801, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.8592814371257484, | |
| "grad_norm": 1.113476298013695, | |
| "learning_rate": 1.2165040708930763e-07, | |
| "loss": 0.2724, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.8622754491017965, | |
| "grad_norm": 0.8848774254012737, | |
| "learning_rate": 1.1654885681124661e-07, | |
| "loss": 0.272, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.8652694610778444, | |
| "grad_norm": 1.098906114034813, | |
| "learning_rate": 1.1155531874691372e-07, | |
| "loss": 0.3077, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.8682634730538923, | |
| "grad_norm": 0.9979512468135505, | |
| "learning_rate": 1.0666990334342708e-07, | |
| "loss": 0.2507, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.8712574850299402, | |
| "grad_norm": 0.981938985525399, | |
| "learning_rate": 1.0189271865644445e-07, | |
| "loss": 0.2806, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.874251497005988, | |
| "grad_norm": 1.2887570934487664, | |
| "learning_rate": 9.722387034777847e-08, | |
| "loss": 0.3324, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.877245508982036, | |
| "grad_norm": 1.06498109733751, | |
| "learning_rate": 9.266346168305518e-08, | |
| "loss": 0.3294, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.8802395209580838, | |
| "grad_norm": 0.9243248560068268, | |
| "learning_rate": 8.821159352943142e-08, | |
| "loss": 0.271, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.8832335329341316, | |
| "grad_norm": 1.0289876918720562, | |
| "learning_rate": 8.38683643533661e-08, | |
| "loss": 0.289, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.8862275449101795, | |
| "grad_norm": 1.1057964102171312, | |
| "learning_rate": 7.963387021843683e-08, | |
| "loss": 0.2905, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.8892215568862274, | |
| "grad_norm": 1.0479344818531047, | |
| "learning_rate": 7.550820478322285e-08, | |
| "loss": 0.3142, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.8922155688622755, | |
| "grad_norm": 0.970293984457037, | |
| "learning_rate": 7.149145929922607e-08, | |
| "loss": 0.2432, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.8952095808383234, | |
| "grad_norm": 1.0293625951669665, | |
| "learning_rate": 6.758372260885714e-08, | |
| "loss": 0.2966, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.8982035928143712, | |
| "grad_norm": 1.1560159962780443, | |
| "learning_rate": 6.378508114346982e-08, | |
| "loss": 0.2782, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.9011976047904193, | |
| "grad_norm": 1.1616630038529756, | |
| "learning_rate": 6.009561892144744e-08, | |
| "loss": 0.2624, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.9041916167664672, | |
| "grad_norm": 1.0304623182758117, | |
| "learning_rate": 5.651541754634726e-08, | |
| "loss": 0.2541, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.907185628742515, | |
| "grad_norm": 1.0692775431317154, | |
| "learning_rate": 5.304455620509297e-08, | |
| "loss": 0.2949, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.910179640718563, | |
| "grad_norm": 1.0270535615556404, | |
| "learning_rate": 4.968311166622553e-08, | |
| "loss": 0.2803, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.9131736526946108, | |
| "grad_norm": 1.2246168065187621, | |
| "learning_rate": 4.643115827820399e-08, | |
| "loss": 0.2532, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.9161676646706587, | |
| "grad_norm": 1.0974822810039906, | |
| "learning_rate": 4.328876796776071e-08, | |
| "loss": 0.3181, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.9191616766467066, | |
| "grad_norm": 1.098535203964859, | |
| "learning_rate": 4.0256010238310936e-08, | |
| "loss": 0.2667, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.9221556886227544, | |
| "grad_norm": 0.9786972084257104, | |
| "learning_rate": 3.733295216841626e-08, | |
| "loss": 0.2984, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.9251497005988023, | |
| "grad_norm": 1.0311453074080557, | |
| "learning_rate": 3.451965841029914e-08, | |
| "loss": 0.2787, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.9281437125748502, | |
| "grad_norm": 0.9231418615117579, | |
| "learning_rate": 3.181619118841517e-08, | |
| "loss": 0.3238, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.931137724550898, | |
| "grad_norm": 1.1750677511871903, | |
| "learning_rate": 2.9222610298074717e-08, | |
| "loss": 0.2647, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.9341317365269461, | |
| "grad_norm": 1.0120512030231554, | |
| "learning_rate": 2.673897310412288e-08, | |
| "loss": 0.309, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.937125748502994, | |
| "grad_norm": 0.8817355352707243, | |
| "learning_rate": 2.4365334539667717e-08, | |
| "loss": 0.2438, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.9401197604790419, | |
| "grad_norm": 1.0519606006081759, | |
| "learning_rate": 2.210174710486679e-08, | |
| "loss": 0.2717, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.94311377245509, | |
| "grad_norm": 0.9806412476513693, | |
| "learning_rate": 1.99482608657664e-08, | |
| "loss": 0.2738, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.9461077844311379, | |
| "grad_norm": 1.0726832807865938, | |
| "learning_rate": 1.7904923453193056e-08, | |
| "loss": 0.3137, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.9491017964071857, | |
| "grad_norm": 0.9962822284300865, | |
| "learning_rate": 1.5971780061701524e-08, | |
| "loss": 0.2683, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.9520958083832336, | |
| "grad_norm": 1.0572087758886517, | |
| "learning_rate": 1.4148873448573408e-08, | |
| "loss": 0.2509, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.9550898203592815, | |
| "grad_norm": 0.9414554056307058, | |
| "learning_rate": 1.2436243932872349e-08, | |
| "loss": 0.2931, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.9580838323353293, | |
| "grad_norm": 1.0242323388942745, | |
| "learning_rate": 1.0833929394552523e-08, | |
| "loss": 0.2475, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.9610778443113772, | |
| "grad_norm": 0.9469512572922653, | |
| "learning_rate": 9.341965273621522e-09, | |
| "loss": 0.2922, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.964071856287425, | |
| "grad_norm": 1.0307882551096461, | |
| "learning_rate": 7.96038456935322e-09, | |
| "loss": 0.2969, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.967065868263473, | |
| "grad_norm": 1.0254156089121207, | |
| "learning_rate": 6.6892178395611125e-09, | |
| "loss": 0.3197, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.9700598802395208, | |
| "grad_norm": 1.1992555938837843, | |
| "learning_rate": 5.528493199922769e-09, | |
| "loss": 0.3176, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.9730538922155687, | |
| "grad_norm": 1.0192367851852362, | |
| "learning_rate": 4.478236323355312e-09, | |
| "loss": 0.2554, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.9760479041916168, | |
| "grad_norm": 1.1010374506572544, | |
| "learning_rate": 3.538470439448105e-09, | |
| "loss": 0.342, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.9790419161676647, | |
| "grad_norm": 1.0043897021209338, | |
| "learning_rate": 2.709216333952602e-09, | |
| "loss": 0.2927, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.9820359281437125, | |
| "grad_norm": 0.9301795131335764, | |
| "learning_rate": 1.9904923483171632e-09, | |
| "loss": 0.2454, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.9850299401197606, | |
| "grad_norm": 1.0714797530134446, | |
| "learning_rate": 1.3823143792851545e-09, | |
| "loss": 0.334, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.9880239520958085, | |
| "grad_norm": 1.1049420069328866, | |
| "learning_rate": 8.846958785418969e-10, | |
| "loss": 0.3257, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.9910179640718564, | |
| "grad_norm": 1.0252696177799858, | |
| "learning_rate": 4.97647852417682e-10, | |
| "loss": 0.2529, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.9940119760479043, | |
| "grad_norm": 0.998494728565151, | |
| "learning_rate": 2.2117886164407797e-10, | |
| "loss": 0.2675, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.9970059880239521, | |
| "grad_norm": 0.8885624060187792, | |
| "learning_rate": 5.529502116519148e-11, | |
| "loss": 0.2458, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.9847232801633843, | |
| "learning_rate": 0.0, | |
| "loss": 0.1923, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 668, | |
| "total_flos": 21199392964608.0, | |
| "train_loss": 0.3376133635327845, | |
| "train_runtime": 1178.6141, | |
| "train_samples_per_second": 9.043, | |
| "train_steps_per_second": 0.567 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 668, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 21199392964608.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
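
The record above appears to be the Hugging Face Trainer's `trainer_state.json`: per-step entries under `log_history` (each with `epoch`, `grad_norm`, `learning_rate`, `loss`, `step`), a final aggregate entry (`train_loss`, `train_runtime`, throughput), and run-level settings (`max_steps`, `num_train_epochs`, `train_batch_size`). As a minimal sketch of how one might inspect it, the snippet below assumes the JSON has been saved to a plain file named `trainer_state.json` (a hypothetical filename, not given in this document) and prints a short summary of the loss trajectory.

```python
# Hypothetical sketch: load a Trainer state file and summarize the loss log.
# Assumes the JSON shown above is stored as "trainer_state.json" (assumption).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final aggregate record carries
# "train_loss", "train_runtime", etc. instead, so it is excluded here.
step_records = [r for r in state["log_history"] if "loss" in r]
summary = state["log_history"][-1]

print(f"steps logged       : {len(step_records)}")
print(f"first / last loss  : {step_records[0]['loss']:.4f} / {step_records[-1]['loss']:.4f}")
print(f"mean train loss    : {summary.get('train_loss', float('nan')):.4f}")
print(f"train runtime (s)  : {summary.get('train_runtime', float('nan')):.1f}")
```

For the values in this log, such a summary would report 668 logged steps, a final step loss of 0.1923, a mean training loss of about 0.3376, and a runtime of roughly 1178.6 seconds.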