{
  "best_metric": 0.9593731449578206,
  "best_model_checkpoint": "ModernBERT-domain-classifier/checkpoint-31908",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 47862,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006268020559107434,
      "grad_norm": 12.357645034790039,
      "learning_rate": 4.9968659897204466e-05,
      "loss": 2.0025,
      "step": 100
    },
    {
      "epoch": 0.012536041118214868,
      "grad_norm": 20.970069885253906,
      "learning_rate": 4.9937319794408924e-05,
      "loss": 1.6744,
      "step": 200
    },
    {
      "epoch": 0.018804061677322303,
      "grad_norm": 15.532102584838867,
      "learning_rate": 4.9905979691613395e-05,
      "loss": 1.3409,
      "step": 300
    },
    {
      "epoch": 0.025072082236429736,
      "grad_norm": 23.552701950073242,
      "learning_rate": 4.987463958881786e-05,
      "loss": 1.0365,
      "step": 400
    },
    {
      "epoch": 0.03134010279553717,
      "grad_norm": 13.55091381072998,
      "learning_rate": 4.9843299486022316e-05,
      "loss": 0.9218,
      "step": 500
    },
    {
      "epoch": 0.037608123354644606,
      "grad_norm": 33.0805549621582,
      "learning_rate": 4.981195938322678e-05,
      "loss": 0.809,
      "step": 600
    },
    {
      "epoch": 0.04387614391375204,
      "grad_norm": 8.363154411315918,
      "learning_rate": 4.978061928043124e-05,
      "loss": 0.6969,
      "step": 700
    },
    {
      "epoch": 0.05014416447285947,
      "grad_norm": 10.521018981933594,
      "learning_rate": 4.97492791776357e-05,
      "loss": 0.7812,
      "step": 800
    },
    {
      "epoch": 0.056412185031966905,
      "grad_norm": 11.835216522216797,
      "learning_rate": 4.971793907484017e-05,
      "loss": 0.6354,
      "step": 900
    },
    {
      "epoch": 0.06268020559107434,
      "grad_norm": 15.48874282836914,
      "learning_rate": 4.968659897204463e-05,
      "loss": 0.589,
      "step": 1000
    },
    {
      "epoch": 0.06894822615018177,
      "grad_norm": 5.225059986114502,
      "learning_rate": 4.9655258869249094e-05,
      "loss": 0.5623,
      "step": 1100
    },
    {
      "epoch": 0.07521624670928921,
      "grad_norm": 8.500799179077148,
      "learning_rate": 4.962391876645356e-05,
      "loss": 0.4375,
      "step": 1200
    },
    {
      "epoch": 0.08148426726839664,
      "grad_norm": 0.36157914996147156,
      "learning_rate": 4.9592578663658015e-05,
      "loss": 0.4691,
      "step": 1300
    },
    {
      "epoch": 0.08775228782750408,
      "grad_norm": 0.9055732488632202,
      "learning_rate": 4.9561238560862486e-05,
      "loss": 0.5083,
      "step": 1400
    },
    {
      "epoch": 0.0940203083866115,
      "grad_norm": 15.834701538085938,
      "learning_rate": 4.9529898458066944e-05,
      "loss": 0.4496,
      "step": 1500
    },
    {
      "epoch": 0.10028832894571894,
      "grad_norm": 8.808372497558594,
      "learning_rate": 4.949855835527141e-05,
      "loss": 0.4379,
      "step": 1600
    },
    {
      "epoch": 0.10655634950482637,
      "grad_norm": 13.547663688659668,
      "learning_rate": 4.946721825247587e-05,
      "loss": 0.4547,
      "step": 1700
    },
    {
      "epoch": 0.11282437006393381,
      "grad_norm": 12.697524070739746,
      "learning_rate": 4.943587814968033e-05,
      "loss": 0.4347,
      "step": 1800
    },
    {
      "epoch": 0.11909239062304125,
      "grad_norm": 14.472429275512695,
      "learning_rate": 4.940453804688479e-05,
      "loss": 0.3853,
      "step": 1900
    },
    {
      "epoch": 0.1253604111821487,
      "grad_norm": 3.351533889770508,
      "learning_rate": 4.9373197944089264e-05,
      "loss": 0.4489,
      "step": 2000
    },
    {
      "epoch": 0.1316284317412561,
      "grad_norm": 0.23762217164039612,
      "learning_rate": 4.934185784129372e-05,
      "loss": 0.3964,
      "step": 2100
    },
    {
      "epoch": 0.13789645230036354,
      "grad_norm": 39.0670166015625,
      "learning_rate": 4.9310517738498186e-05,
      "loss": 0.3861,
      "step": 2200
    },
    {
      "epoch": 0.14416447285947098,
      "grad_norm": 0.047631457448005676,
      "learning_rate": 4.927917763570265e-05,
      "loss": 0.3109,
      "step": 2300
    },
    {
      "epoch": 0.15043249341857842,
      "grad_norm": 17.598962783813477,
      "learning_rate": 4.924783753290711e-05,
      "loss": 0.3623,
      "step": 2400
    },
    {
      "epoch": 0.15670051397768583,
      "grad_norm": 6.580572128295898,
      "learning_rate": 4.921649743011158e-05,
      "loss": 0.2885,
      "step": 2500
    },
    {
      "epoch": 0.16296853453679327,
      "grad_norm": 29.607555389404297,
      "learning_rate": 4.9185157327316035e-05,
      "loss": 0.3811,
      "step": 2600
    },
    {
      "epoch": 0.16923655509590071,
      "grad_norm": 27.72866439819336,
      "learning_rate": 4.91538172245205e-05,
      "loss": 0.3464,
      "step": 2700
    },
    {
      "epoch": 0.17550457565500815,
      "grad_norm": 0.1403748244047165,
      "learning_rate": 4.912247712172496e-05,
      "loss": 0.3022,
      "step": 2800
    },
    {
      "epoch": 0.1817725962141156,
      "grad_norm": 0.08881185203790665,
      "learning_rate": 4.909113701892942e-05,
      "loss": 0.3729,
      "step": 2900
    },
    {
      "epoch": 0.188040616773223,
      "grad_norm": 6.994024276733398,
      "learning_rate": 4.9059796916133885e-05,
      "loss": 0.3545,
      "step": 3000
    },
    {
      "epoch": 0.19430863733233045,
      "grad_norm": 8.588685035705566,
      "learning_rate": 4.9028456813338356e-05,
      "loss": 0.3284,
      "step": 3100
    },
    {
      "epoch": 0.2005766578914379,
      "grad_norm": 0.32822656631469727,
      "learning_rate": 4.899711671054281e-05,
      "loss": 0.3661,
      "step": 3200
    },
    {
      "epoch": 0.20684467845054533,
      "grad_norm": 1.6464039087295532,
      "learning_rate": 4.896577660774728e-05,
      "loss": 0.3359,
      "step": 3300
    },
    {
      "epoch": 0.21311269900965274,
      "grad_norm": 0.12039870768785477,
      "learning_rate": 4.8934436504951734e-05,
      "loss": 0.353,
      "step": 3400
    },
    {
      "epoch": 0.21938071956876018,
      "grad_norm": 2.3566761016845703,
      "learning_rate": 4.89030964021562e-05,
      "loss": 0.3458,
      "step": 3500
    },
    {
      "epoch": 0.22564874012786762,
      "grad_norm": 0.4579617977142334,
      "learning_rate": 4.887175629936066e-05,
      "loss": 0.3018,
      "step": 3600
    },
    {
      "epoch": 0.23191676068697506,
      "grad_norm": 14.720295906066895,
      "learning_rate": 4.884041619656513e-05,
      "loss": 0.3,
      "step": 3700
    },
    {
      "epoch": 0.2381847812460825,
      "grad_norm": 20.024921417236328,
      "learning_rate": 4.880907609376959e-05,
      "loss": 0.3259,
      "step": 3800
    },
    {
      "epoch": 0.2444528018051899,
      "grad_norm": 0.1657363623380661,
      "learning_rate": 4.8777735990974055e-05,
      "loss": 0.3422,
      "step": 3900
    },
    {
      "epoch": 0.2507208223642974,
      "grad_norm": 13.921937942504883,
      "learning_rate": 4.874639588817851e-05,
      "loss": 0.3179,
      "step": 4000
    },
    {
      "epoch": 0.2569888429234048,
      "grad_norm": 5.137484550476074,
      "learning_rate": 4.8715055785382976e-05,
      "loss": 0.3224,
      "step": 4100
    },
    {
      "epoch": 0.2632568634825122,
      "grad_norm": 0.32827550172805786,
      "learning_rate": 4.868371568258744e-05,
      "loss": 0.3353,
      "step": 4200
    },
    {
      "epoch": 0.2695248840416197,
      "grad_norm": 0.013694345951080322,
      "learning_rate": 4.8652375579791905e-05,
      "loss": 0.2706,
      "step": 4300
    },
    {
      "epoch": 0.2757929046007271,
      "grad_norm": 4.901852130889893,
      "learning_rate": 4.862103547699637e-05,
      "loss": 0.3296,
      "step": 4400
    },
    {
      "epoch": 0.2820609251598345,
      "grad_norm": 1.1784251928329468,
      "learning_rate": 4.8589695374200826e-05,
      "loss": 0.2737,
      "step": 4500
    },
    {
      "epoch": 0.28832894571894196,
      "grad_norm": 4.07144021987915,
      "learning_rate": 4.855835527140529e-05,
      "loss": 0.3078,
      "step": 4600
    },
    {
      "epoch": 0.2945969662780494,
      "grad_norm": 27.181507110595703,
      "learning_rate": 4.8527015168609754e-05,
      "loss": 0.3688,
      "step": 4700
    },
    {
      "epoch": 0.30086498683715684,
      "grad_norm": 2.0683059692382812,
      "learning_rate": 4.849567506581422e-05,
      "loss": 0.269,
      "step": 4800
    },
    {
      "epoch": 0.30713300739626426,
      "grad_norm": 0.0376540943980217,
      "learning_rate": 4.846433496301868e-05,
      "loss": 0.2659,
      "step": 4900
    },
    {
      "epoch": 0.31340102795537167,
      "grad_norm": 4.966544151306152,
      "learning_rate": 4.8432994860223146e-05,
      "loss": 0.2155,
      "step": 5000
    },
    {
      "epoch": 0.31966904851447914,
      "grad_norm": 0.2637276351451874,
      "learning_rate": 4.8401654757427604e-05,
      "loss": 0.2505,
      "step": 5100
    },
    {
      "epoch": 0.32593706907358655,
      "grad_norm": 9.37961196899414,
      "learning_rate": 4.837031465463207e-05,
      "loss": 0.347,
      "step": 5200
    },
    {
      "epoch": 0.332205089632694,
      "grad_norm": 0.15031716227531433,
      "learning_rate": 4.833897455183653e-05,
      "loss": 0.2586,
      "step": 5300
    },
    {
      "epoch": 0.33847311019180143,
      "grad_norm": 0.07241084426641464,
      "learning_rate": 4.8307634449040996e-05,
      "loss": 0.2508,
      "step": 5400
    },
    {
      "epoch": 0.34474113075090884,
      "grad_norm": 6.840641975402832,
      "learning_rate": 4.827629434624546e-05,
      "loss": 0.2748,
      "step": 5500
    },
    {
      "epoch": 0.3510091513100163,
      "grad_norm": 8.171294212341309,
      "learning_rate": 4.824495424344992e-05,
      "loss": 0.2542,
      "step": 5600
    },
    {
      "epoch": 0.3572771718691237,
      "grad_norm": 3.3636763095855713,
      "learning_rate": 4.821361414065438e-05,
      "loss": 0.2947,
      "step": 5700
    },
    {
      "epoch": 0.3635451924282312,
      "grad_norm": 1.3265916109085083,
      "learning_rate": 4.8182274037858846e-05,
      "loss": 0.2655,
      "step": 5800
    },
    {
      "epoch": 0.3698132129873386,
      "grad_norm": 21.420007705688477,
      "learning_rate": 4.815093393506331e-05,
      "loss": 0.2877,
      "step": 5900
    },
    {
      "epoch": 0.376081233546446,
      "grad_norm": 4.7249250411987305,
      "learning_rate": 4.8119593832267774e-05,
      "loss": 0.279,
      "step": 6000
    },
    {
      "epoch": 0.3823492541055535,
      "grad_norm": 0.15022224187850952,
      "learning_rate": 4.808825372947223e-05,
      "loss": 0.2357,
      "step": 6100
    },
    {
      "epoch": 0.3886172746646609,
      "grad_norm": 0.4195699989795685,
      "learning_rate": 4.8056913626676695e-05,
      "loss": 0.2828,
      "step": 6200
    },
    {
      "epoch": 0.3948852952237683,
      "grad_norm": 1.3854116201400757,
      "learning_rate": 4.802557352388116e-05,
      "loss": 0.2591,
      "step": 6300
    },
    {
      "epoch": 0.4011533157828758,
      "grad_norm": 0.041278645396232605,
      "learning_rate": 4.7994233421085624e-05,
      "loss": 0.1858,
      "step": 6400
    },
    {
      "epoch": 0.4074213363419832,
      "grad_norm": 0.029629528522491455,
      "learning_rate": 4.796289331829009e-05,
      "loss": 0.2609,
      "step": 6500
    },
    {
      "epoch": 0.41368935690109065,
      "grad_norm": 8.087828636169434,
      "learning_rate": 4.793155321549455e-05,
      "loss": 0.2383,
      "step": 6600
    },
    {
      "epoch": 0.41995737746019807,
      "grad_norm": 13.660598754882812,
      "learning_rate": 4.790021311269901e-05,
      "loss": 0.266,
      "step": 6700
    },
    {
      "epoch": 0.4262253980193055,
      "grad_norm": 14.072225570678711,
      "learning_rate": 4.786887300990347e-05,
      "loss": 0.241,
      "step": 6800
    },
    {
      "epoch": 0.43249341857841295,
      "grad_norm": 0.16693659126758575,
      "learning_rate": 4.783753290710794e-05,
      "loss": 0.2785,
      "step": 6900
    },
    {
      "epoch": 0.43876143913752036,
      "grad_norm": 5.747068405151367,
      "learning_rate": 4.78061928043124e-05,
      "loss": 0.2253,
      "step": 7000
    },
    {
      "epoch": 0.4450294596966278,
      "grad_norm": 0.04524581879377365,
      "learning_rate": 4.7774852701516865e-05,
      "loss": 0.276,
      "step": 7100
    },
    {
      "epoch": 0.45129748025573524,
      "grad_norm": 36.236358642578125,
      "learning_rate": 4.774351259872132e-05,
      "loss": 0.2014,
      "step": 7200
    },
    {
      "epoch": 0.45756550081484265,
      "grad_norm": 0.07485531270503998,
      "learning_rate": 4.771217249592579e-05,
      "loss": 0.285,
      "step": 7300
    },
    {
      "epoch": 0.4638335213739501,
      "grad_norm": 4.288136959075928,
      "learning_rate": 4.768083239313025e-05,
      "loss": 0.2378,
      "step": 7400
    },
    {
      "epoch": 0.47010154193305753,
      "grad_norm": 13.278217315673828,
      "learning_rate": 4.7649492290334715e-05,
      "loss": 0.281,
      "step": 7500
    },
    {
      "epoch": 0.476369562492165,
      "grad_norm": 0.08824972063302994,
      "learning_rate": 4.761815218753918e-05,
      "loss": 0.2629,
      "step": 7600
    },
    {
      "epoch": 0.4826375830512724,
      "grad_norm": 4.795785903930664,
      "learning_rate": 4.758681208474364e-05,
      "loss": 0.2207,
      "step": 7700
    },
    {
      "epoch": 0.4889056036103798,
      "grad_norm": 0.27128365635871887,
      "learning_rate": 4.75554719819481e-05,
      "loss": 0.2615,
      "step": 7800
    },
    {
      "epoch": 0.4951736241694873,
      "grad_norm": 0.5811434388160706,
      "learning_rate": 4.7524131879152565e-05,
      "loss": 0.2678,
      "step": 7900
    },
    {
      "epoch": 0.5014416447285948,
      "grad_norm": 0.03569044545292854,
      "learning_rate": 4.749279177635703e-05,
      "loss": 0.2467,
      "step": 8000
    },
    {
      "epoch": 0.5077096652877021,
      "grad_norm": 0.034228041768074036,
      "learning_rate": 4.746145167356149e-05,
      "loss": 0.2433,
      "step": 8100
    },
    {
      "epoch": 0.5139776858468096,
      "grad_norm": 1.4098780155181885,
      "learning_rate": 4.743011157076596e-05,
      "loss": 0.2147,
      "step": 8200
    },
    {
      "epoch": 0.520245706405917,
      "grad_norm": 9.719144821166992,
      "learning_rate": 4.7398771467970414e-05,
      "loss": 0.2991,
      "step": 8300
    },
    {
      "epoch": 0.5265137269650244,
      "grad_norm": 8.547225952148438,
      "learning_rate": 4.736743136517488e-05,
      "loss": 0.208,
      "step": 8400
    },
    {
      "epoch": 0.5327817475241319,
      "grad_norm": 0.24977388978004456,
      "learning_rate": 4.733609126237934e-05,
      "loss": 0.2344,
      "step": 8500
    },
    {
      "epoch": 0.5390497680832393,
      "grad_norm": 6.569125175476074,
      "learning_rate": 4.730475115958381e-05,
      "loss": 0.2045,
      "step": 8600
    },
    {
      "epoch": 0.5453177886423467,
      "grad_norm": 2.6308412551879883,
      "learning_rate": 4.727341105678827e-05,
      "loss": 0.2342,
      "step": 8700
    },
    {
      "epoch": 0.5515858092014542,
      "grad_norm": 2.902594804763794,
      "learning_rate": 4.724207095399273e-05,
      "loss": 0.2608,
      "step": 8800
    },
    {
      "epoch": 0.5578538297605616,
      "grad_norm": 0.1357877552509308,
      "learning_rate": 4.721073085119719e-05,
      "loss": 0.2196,
      "step": 8900
    },
    {
      "epoch": 0.564121850319669,
      "grad_norm": 0.747113823890686,
      "learning_rate": 4.7179390748401656e-05,
      "loss": 0.2465,
      "step": 9000
    },
    {
      "epoch": 0.5703898708787765,
      "grad_norm": 0.24382618069648743,
      "learning_rate": 4.714805064560612e-05,
      "loss": 0.224,
      "step": 9100
    },
    {
      "epoch": 0.5766578914378839,
      "grad_norm": 0.23163369297981262,
      "learning_rate": 4.7116710542810584e-05,
      "loss": 0.3044,
      "step": 9200
    },
    {
      "epoch": 0.5829259119969914,
      "grad_norm": 12.491781234741211,
      "learning_rate": 4.708537044001505e-05,
      "loss": 0.2169,
      "step": 9300
    },
    {
      "epoch": 0.5891939325560988,
      "grad_norm": 1.7576824426651,
      "learning_rate": 4.7054030337219506e-05,
      "loss": 0.2451,
      "step": 9400
    },
    {
      "epoch": 0.5954619531152062,
      "grad_norm": 5.619567394256592,
      "learning_rate": 4.702269023442397e-05,
      "loss": 0.1866,
      "step": 9500
    },
    {
      "epoch": 0.6017299736743137,
      "grad_norm": 6.3452372550964355,
      "learning_rate": 4.6991350131628434e-05,
      "loss": 0.2275,
      "step": 9600
    },
    {
      "epoch": 0.607997994233421,
      "grad_norm": 0.31002557277679443,
      "learning_rate": 4.69600100288329e-05,
      "loss": 0.2169,
      "step": 9700
    },
    {
      "epoch": 0.6142660147925285,
      "grad_norm": 1.301358938217163,
      "learning_rate": 4.692866992603736e-05,
      "loss": 0.1886,
      "step": 9800
    },
    {
      "epoch": 0.620534035351636,
      "grad_norm": 0.06169494241476059,
      "learning_rate": 4.689732982324182e-05,
      "loss": 0.1638,
      "step": 9900
    },
    {
      "epoch": 0.6268020559107433,
      "grad_norm": 13.020919799804688,
      "learning_rate": 4.6865989720446284e-05,
      "loss": 0.1943,
      "step": 10000
    },
    {
      "epoch": 0.6330700764698508,
      "grad_norm": 7.256196022033691,
      "learning_rate": 4.683464961765075e-05,
      "loss": 0.2656,
      "step": 10100
    },
    {
      "epoch": 0.6393380970289583,
      "grad_norm": 0.7037761807441711,
      "learning_rate": 4.680330951485521e-05,
      "loss": 0.2449,
      "step": 10200
    },
    {
      "epoch": 0.6456061175880657,
      "grad_norm": 0.023662934079766273,
      "learning_rate": 4.6771969412059676e-05,
      "loss": 0.1742,
      "step": 10300
    },
    {
      "epoch": 0.6518741381471731,
      "grad_norm": 0.11139848828315735,
      "learning_rate": 4.674062930926414e-05,
      "loss": 0.2589,
      "step": 10400
    },
    {
      "epoch": 0.6581421587062806,
      "grad_norm": 3.553799867630005,
      "learning_rate": 4.67092892064686e-05,
      "loss": 0.2232,
      "step": 10500
    },
    {
      "epoch": 0.664410179265388,
      "grad_norm": 0.04857179895043373,
      "learning_rate": 4.667794910367306e-05,
      "loss": 0.218,
      "step": 10600
    },
    {
      "epoch": 0.6706781998244954,
      "grad_norm": 12.25068473815918,
      "learning_rate": 4.6646609000877526e-05,
      "loss": 0.1691,
      "step": 10700
    },
    {
      "epoch": 0.6769462203836029,
      "grad_norm": 0.07977677136659622,
      "learning_rate": 4.661526889808199e-05,
      "loss": 0.1842,
      "step": 10800
    },
    {
      "epoch": 0.6832142409427103,
      "grad_norm": 23.676185607910156,
      "learning_rate": 4.6583928795286454e-05,
      "loss": 0.2229,
      "step": 10900
    },
    {
      "epoch": 0.6894822615018177,
      "grad_norm": 6.073796272277832,
      "learning_rate": 4.655258869249091e-05,
      "loss": 0.1825,
      "step": 11000
    },
    {
      "epoch": 0.6957502820609252,
      "grad_norm": 0.6215938925743103,
      "learning_rate": 4.6521248589695375e-05,
      "loss": 0.2086,
      "step": 11100
    },
    {
      "epoch": 0.7020183026200326,
      "grad_norm": 32.23947525024414,
      "learning_rate": 4.648990848689984e-05,
      "loss": 0.1622,
      "step": 11200
    },
    {
      "epoch": 0.70828632317914,
      "grad_norm": 10.441763877868652,
      "learning_rate": 4.6458568384104303e-05,
      "loss": 0.1814,
      "step": 11300
    },
    {
      "epoch": 0.7145543437382474,
      "grad_norm": 0.10066933929920197,
      "learning_rate": 4.642722828130877e-05,
      "loss": 0.2305,
      "step": 11400
    },
    {
      "epoch": 0.7208223642973549,
      "grad_norm": 13.855279922485352,
      "learning_rate": 4.639588817851323e-05,
      "loss": 0.1431,
      "step": 11500
    },
    {
      "epoch": 0.7270903848564624,
      "grad_norm": 3.840632915496826,
      "learning_rate": 4.636454807571769e-05,
      "loss": 0.2341,
      "step": 11600
    },
    {
      "epoch": 0.7333584054155697,
      "grad_norm": 7.0180511474609375,
      "learning_rate": 4.633320797292215e-05,
      "loss": 0.1847,
      "step": 11700
    },
    {
      "epoch": 0.7396264259746772,
      "grad_norm": 3.461946964263916,
      "learning_rate": 4.630186787012662e-05,
      "loss": 0.2863,
      "step": 11800
    },
    {
      "epoch": 0.7458944465337847,
      "grad_norm": 12.662203788757324,
      "learning_rate": 4.627052776733108e-05,
      "loss": 0.2175,
      "step": 11900
    },
    {
      "epoch": 0.752162467092892,
      "grad_norm": 5.079529762268066,
      "learning_rate": 4.6239187664535545e-05,
      "loss": 0.2338,
      "step": 12000
    },
    {
      "epoch": 0.7584304876519995,
      "grad_norm": 0.03900396078824997,
      "learning_rate": 4.620784756174e-05,
      "loss": 0.2092,
      "step": 12100
    },
    {
      "epoch": 0.764698508211107,
      "grad_norm": 0.748273491859436,
      "learning_rate": 4.617650745894447e-05,
      "loss": 0.2213,
      "step": 12200
    },
    {
      "epoch": 0.7709665287702143,
      "grad_norm": 6.272397994995117,
      "learning_rate": 4.614516735614893e-05,
      "loss": 0.1807,
      "step": 12300
    },
    {
      "epoch": 0.7772345493293218,
      "grad_norm": 6.342704772949219,
      "learning_rate": 4.6113827253353395e-05,
      "loss": 0.2114,
      "step": 12400
    },
    {
      "epoch": 0.7835025698884293,
      "grad_norm": 15.692971229553223,
      "learning_rate": 4.608248715055786e-05,
      "loss": 0.1815,
      "step": 12500
    },
    {
      "epoch": 0.7897705904475366,
      "grad_norm": 0.11576833575963974,
      "learning_rate": 4.6051147047762316e-05,
      "loss": 0.19,
      "step": 12600
    },
    {
      "epoch": 0.7960386110066441,
      "grad_norm": 0.6892977952957153,
      "learning_rate": 4.601980694496678e-05,
      "loss": 0.2188,
      "step": 12700
    },
    {
      "epoch": 0.8023066315657515,
      "grad_norm": 0.3841142952442169,
      "learning_rate": 4.5988466842171245e-05,
      "loss": 0.1978,
      "step": 12800
    },
    {
      "epoch": 0.808574652124859,
      "grad_norm": 0.10382460802793503,
      "learning_rate": 4.595712673937571e-05,
      "loss": 0.207,
      "step": 12900
    },
    {
      "epoch": 0.8148426726839664,
      "grad_norm": 9.244260787963867,
      "learning_rate": 4.592578663658017e-05,
      "loss": 0.2113,
      "step": 13000
    },
    {
      "epoch": 0.8211106932430738,
      "grad_norm": 2.754213333129883,
      "learning_rate": 4.589444653378464e-05,
      "loss": 0.2173,
      "step": 13100
    },
    {
      "epoch": 0.8273787138021813,
      "grad_norm": 0.046558987349271774,
      "learning_rate": 4.5863106430989094e-05,
      "loss": 0.1983,
      "step": 13200
    },
    {
      "epoch": 0.8336467343612887,
      "grad_norm": 0.7020443081855774,
      "learning_rate": 4.583176632819356e-05,
      "loss": 0.1804,
      "step": 13300
    },
    {
      "epoch": 0.8399147549203961,
      "grad_norm": 2.7180488109588623,
      "learning_rate": 4.5800426225398016e-05,
      "loss": 0.2352,
      "step": 13400
    },
    {
      "epoch": 0.8461827754795036,
      "grad_norm": 4.205974102020264,
      "learning_rate": 4.5769086122602487e-05,
      "loss": 0.1915,
      "step": 13500
    },
    {
      "epoch": 0.852450796038611,
      "grad_norm": 0.0550006739795208,
      "learning_rate": 4.573774601980695e-05,
      "loss": 0.2185,
      "step": 13600
    },
    {
      "epoch": 0.8587188165977184,
      "grad_norm": 0.3276146352291107,
      "learning_rate": 4.570640591701141e-05,
      "loss": 0.1954,
      "step": 13700
    },
    {
      "epoch": 0.8649868371568259,
      "grad_norm": 0.15139640867710114,
      "learning_rate": 4.567506581421587e-05,
      "loss": 0.1623,
      "step": 13800
    },
    {
      "epoch": 0.8712548577159334,
      "grad_norm": 6.504459381103516,
      "learning_rate": 4.5643725711420336e-05,
      "loss": 0.1982,
      "step": 13900
    },
    {
      "epoch": 0.8775228782750407,
      "grad_norm": 0.07944973558187485,
      "learning_rate": 4.5612385608624794e-05,
      "loss": 0.2283,
      "step": 14000
    },
    {
      "epoch": 0.8837908988341482,
      "grad_norm": 0.19034264981746674,
      "learning_rate": 4.5581045505829264e-05,
      "loss": 0.1896,
      "step": 14100
    },
    {
      "epoch": 0.8900589193932557,
      "grad_norm": 0.6090401411056519,
      "learning_rate": 4.554970540303373e-05,
      "loss": 0.1645,
      "step": 14200
    },
    {
      "epoch": 0.896326939952363,
      "grad_norm": 0.03382911533117294,
      "learning_rate": 4.5518365300238186e-05,
      "loss": 0.2243,
      "step": 14300
    },
    {
      "epoch": 0.9025949605114705,
      "grad_norm": 0.9687917232513428,
      "learning_rate": 4.548702519744265e-05,
      "loss": 0.1683,
      "step": 14400
    },
    {
      "epoch": 0.908862981070578,
      "grad_norm": 0.07237120717763901,
      "learning_rate": 4.545568509464711e-05,
      "loss": 0.1632,
      "step": 14500
    },
    {
      "epoch": 0.9151310016296853,
      "grad_norm": 3.475529432296753,
      "learning_rate": 4.542434499185158e-05,
      "loss": 0.2634,
      "step": 14600
    },
    {
      "epoch": 0.9213990221887928,
      "grad_norm": 0.0120365209877491,
      "learning_rate": 4.539300488905604e-05,
      "loss": 0.1222,
      "step": 14700
    },
    {
      "epoch": 0.9276670427479002,
      "grad_norm": 0.8651108741760254,
      "learning_rate": 4.53616647862605e-05,
      "loss": 0.2346,
      "step": 14800
    },
    {
      "epoch": 0.9339350633070076,
      "grad_norm": 0.01904441975057125,
      "learning_rate": 4.5330324683464964e-05,
      "loss": 0.1765,
      "step": 14900
    },
    {
      "epoch": 0.9402030838661151,
      "grad_norm": 6.470919132232666,
      "learning_rate": 4.529898458066943e-05,
      "loss": 0.2147,
      "step": 15000
    },
    {
      "epoch": 0.9464711044252225,
      "grad_norm": 0.36246490478515625,
      "learning_rate": 4.5267644477873885e-05,
      "loss": 0.2193,
      "step": 15100
    },
    {
      "epoch": 0.95273912498433,
      "grad_norm": 9.216349601745605,
      "learning_rate": 4.5236304375078356e-05,
      "loss": 0.1617,
      "step": 15200
    },
    {
      "epoch": 0.9590071455434374,
      "grad_norm": 0.5773089528083801,
      "learning_rate": 4.520496427228281e-05,
      "loss": 0.1949,
      "step": 15300
    },
    {
      "epoch": 0.9652751661025448,
      "grad_norm": 1.643012285232544,
      "learning_rate": 4.517362416948728e-05,
      "loss": 0.2192,
      "step": 15400
    },
    {
      "epoch": 0.9715431866616523,
      "grad_norm": 18.9224853515625,
      "learning_rate": 4.514228406669174e-05,
      "loss": 0.1987,
      "step": 15500
    },
    {
      "epoch": 0.9778112072207596,
      "grad_norm": 3.0048677921295166,
      "learning_rate": 4.51109439638962e-05,
      "loss": 0.2092,
      "step": 15600
    },
    {
      "epoch": 0.9840792277798671,
      "grad_norm": 0.02751915343105793,
      "learning_rate": 4.507960386110067e-05,
      "loss": 0.1458,
      "step": 15700
    },
    {
      "epoch": 0.9903472483389746,
      "grad_norm": 1.8317995071411133,
      "learning_rate": 4.5048263758305134e-05,
      "loss": 0.1322,
      "step": 15800
    },
    {
      "epoch": 0.9966152688980819,
      "grad_norm": 5.543197154998779,
      "learning_rate": 4.501692365550959e-05,
      "loss": 0.201,
      "step": 15900
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.9497412360183047,
      "eval_loss": 0.20201221108436584,
      "eval_runtime": 47.5825,
      "eval_samples_per_second": 298.03,
      "eval_steps_per_second": 37.262,
      "step": 15954
    },
    {
      "epoch": 1.0028832894571895,
      "grad_norm": 0.2572493553161621,
      "learning_rate": 4.4985583552714055e-05,
      "loss": 0.1726,
      "step": 16000
    },
    {
      "epoch": 1.0091513100162968,
      "grad_norm": 0.023830950260162354,
      "learning_rate": 4.495424344991852e-05,
      "loss": 0.1437,
      "step": 16100
    },
    {
      "epoch": 1.0154193305754042,
      "grad_norm": 0.2514733672142029,
      "learning_rate": 4.492290334712298e-05,
      "loss": 0.1482,
      "step": 16200
    },
    {
      "epoch": 1.0216873511345117,
      "grad_norm": 0.33695363998413086,
      "learning_rate": 4.489156324432745e-05,
      "loss": 0.1092,
      "step": 16300
    },
    {
      "epoch": 1.0279553716936192,
      "grad_norm": 8.34520435333252,
      "learning_rate": 4.4860223141531905e-05,
      "loss": 0.1169,
      "step": 16400
    },
    {
      "epoch": 1.0342233922527266,
      "grad_norm": 0.4668201804161072,
      "learning_rate": 4.482888303873637e-05,
      "loss": 0.1143,
      "step": 16500
    },
    {
      "epoch": 1.040491412811834,
      "grad_norm": 0.028503745794296265,
      "learning_rate": 4.479754293594083e-05,
      "loss": 0.1405,
      "step": 16600
    },
    {
      "epoch": 1.0467594333709414,
      "grad_norm": 0.6004204750061035,
      "learning_rate": 4.476620283314529e-05,
      "loss": 0.1202,
      "step": 16700
    },
    {
      "epoch": 1.0530274539300488,
      "grad_norm": 0.027077751234173775,
      "learning_rate": 4.473486273034976e-05,
      "loss": 0.1141,
      "step": 16800
    },
    {
      "epoch": 1.0592954744891563,
      "grad_norm": 0.026096515357494354,
      "learning_rate": 4.4703522627554225e-05,
      "loss": 0.1982,
      "step": 16900
    },
    {
      "epoch": 1.0655634950482638,
      "grad_norm": 10.615415573120117,
      "learning_rate": 4.467218252475868e-05,
      "loss": 0.1293,
      "step": 17000
    },
    {
      "epoch": 1.0718315156073712,
      "grad_norm": 0.09615737944841385,
      "learning_rate": 4.464084242196315e-05,
      "loss": 0.1146,
      "step": 17100
    },
    {
      "epoch": 1.0780995361664787,
      "grad_norm": 8.987353324890137,
      "learning_rate": 4.4609502319167604e-05,
      "loss": 0.1449,
      "step": 17200
    },
    {
      "epoch": 1.0843675567255862,
      "grad_norm": 0.06758946180343628,
      "learning_rate": 4.457816221637207e-05,
      "loss": 0.1195,
      "step": 17300
    },
    {
      "epoch": 1.0906355772846934,
      "grad_norm": 0.04790417477488518,
      "learning_rate": 4.454682211357654e-05,
      "loss": 0.1694,
      "step": 17400
    },
    {
      "epoch": 1.0969035978438009,
      "grad_norm": 5.665248394012451,
      "learning_rate": 4.4515482010780996e-05,
      "loss": 0.1474,
      "step": 17500
    },
    {
      "epoch": 1.1031716184029083,
      "grad_norm": 0.05797085538506508,
      "learning_rate": 4.448414190798546e-05,
      "loss": 0.1105,
      "step": 17600
    },
    {
      "epoch": 1.1094396389620158,
      "grad_norm": 0.03206124156713486,
      "learning_rate": 4.4452801805189925e-05,
      "loss": 0.0949,
      "step": 17700
    },
    {
      "epoch": 1.1157076595211233,
      "grad_norm": 0.08074434101581573,
      "learning_rate": 4.442146170239438e-05,
      "loss": 0.1655,
      "step": 17800
    },
    {
      "epoch": 1.1219756800802307,
      "grad_norm": 0.020155681297183037,
      "learning_rate": 4.439012159959885e-05,
      "loss": 0.1137,
      "step": 17900
    },
    {
      "epoch": 1.128243700639338,
      "grad_norm": 0.16802071034908295,
      "learning_rate": 4.435878149680332e-05,
      "loss": 0.138,
      "step": 18000
    },
    {
      "epoch": 1.1345117211984455,
      "grad_norm": 5.59773063659668,
      "learning_rate": 4.4327441394007774e-05,
      "loss": 0.1327,
      "step": 18100
    },
    {
      "epoch": 1.140779741757553,
      "grad_norm": 0.5361349582672119,
      "learning_rate": 4.429610129121224e-05,
      "loss": 0.1515,
      "step": 18200
    },
    {
      "epoch": 1.1470477623166604,
      "grad_norm": 0.004768712446093559,
      "learning_rate": 4.4264761188416696e-05,
      "loss": 0.0917,
      "step": 18300
    },
    {
      "epoch": 1.1533157828757679,
      "grad_norm": 0.11958374828100204,
      "learning_rate": 4.423342108562116e-05,
      "loss": 0.1423,
      "step": 18400
    },
    {
      "epoch": 1.1595838034348753,
      "grad_norm": 0.032529979944229126,
      "learning_rate": 4.420208098282563e-05,
      "loss": 0.122,
      "step": 18500
    },
    {
      "epoch": 1.1658518239939828,
      "grad_norm": 5.665451526641846,
      "learning_rate": 4.417074088003009e-05,
      "loss": 0.1351,
      "step": 18600
    },
    {
      "epoch": 1.17211984455309,
      "grad_norm": 5.187871932983398,
      "learning_rate": 4.413940077723455e-05,
      "loss": 0.1364,
      "step": 18700
    },
    {
      "epoch": 1.1783878651121975,
      "grad_norm": 0.017089562490582466,
      "learning_rate": 4.4108060674439016e-05,
      "loss": 0.1011,
      "step": 18800
    },
    {
      "epoch": 1.184655885671305,
      "grad_norm": 0.6145625114440918,
      "learning_rate": 4.4076720571643474e-05,
      "loss": 0.1138,
      "step": 18900
    },
    {
      "epoch": 1.1909239062304124,
      "grad_norm": 0.3502688705921173,
      "learning_rate": 4.4045380468847944e-05,
      "loss": 0.1183,
      "step": 19000
    },
    {
      "epoch": 1.19719192678952,
      "grad_norm": 0.025053372606635094,
      "learning_rate": 4.40140403660524e-05,
      "loss": 0.1461,
      "step": 19100
    },
    {
      "epoch": 1.2034599473486274,
      "grad_norm": 4.409730911254883,
      "learning_rate": 4.3982700263256866e-05,
      "loss": 0.115,
      "step": 19200
    },
    {
      "epoch": 1.2097279679077348,
      "grad_norm": 0.04246415197849274,
      "learning_rate": 4.395136016046133e-05,
      "loss": 0.1226,
      "step": 19300
    },
    {
      "epoch": 1.215995988466842,
      "grad_norm": 0.061329249292612076,
      "learning_rate": 4.392002005766579e-05,
      "loss": 0.178,
      "step": 19400
    },
    {
      "epoch": 1.2222640090259496,
      "grad_norm": 0.6803941130638123,
      "learning_rate": 4.388867995487025e-05,
      "loss": 0.1162,
      "step": 19500
    },
    {
      "epoch": 1.228532029585057,
      "grad_norm": 0.021054541692137718,
      "learning_rate": 4.385733985207472e-05,
      "loss": 0.1519,
      "step": 19600
    },
    {
      "epoch": 1.2348000501441645,
      "grad_norm": 0.03315689042210579,
      "learning_rate": 4.382599974927918e-05,
      "loss": 0.1587,
      "step": 19700
    },
    {
      "epoch": 1.241068070703272,
      "grad_norm": 0.08555241674184799,
      "learning_rate": 4.3794659646483644e-05,
      "loss": 0.2087,
      "step": 19800
    },
    {
      "epoch": 1.2473360912623794,
      "grad_norm": 0.033810269087553024,
      "learning_rate": 4.37633195436881e-05,
      "loss": 0.119,
      "step": 19900
    },
    {
      "epoch": 1.253604111821487,
      "grad_norm": 0.06064239889383316,
      "learning_rate": 4.3731979440892565e-05,
      "loss": 0.1734,
      "step": 20000
    },
    {
      "epoch": 1.2598721323805941,
      "grad_norm": 0.12100313603878021,
      "learning_rate": 4.370063933809703e-05,
      "loss": 0.1858,
      "step": 20100
    },
    {
      "epoch": 1.2661401529397016,
      "grad_norm": 5.416502475738525,
      "learning_rate": 4.366929923530149e-05,
      "loss": 0.1107,
      "step": 20200
    },
    {
      "epoch": 1.272408173498809,
      "grad_norm": 0.41584721207618713,
      "learning_rate": 4.363795913250596e-05,
      "loss": 0.1767,
      "step": 20300
    },
    {
      "epoch": 1.2786761940579165,
      "grad_norm": 0.030553661286830902,
      "learning_rate": 4.360661902971042e-05,
      "loss": 0.0947,
      "step": 20400
    },
    {
      "epoch": 1.284944214617024,
      "grad_norm": 5.664852142333984,
      "learning_rate": 4.357527892691488e-05,
      "loss": 0.1605,
      "step": 20500
    },
    {
      "epoch": 1.2912122351761313,
      "grad_norm": 0.03313322365283966,
      "learning_rate": 4.354393882411934e-05,
      "loss": 0.1782,
      "step": 20600
    },
    {
      "epoch": 1.2974802557352387,
      "grad_norm": 0.237337127327919,
      "learning_rate": 4.3512598721323814e-05,
      "loss": 0.1108,
      "step": 20700
    },
    {
      "epoch": 1.3037482762943462,
      "grad_norm": 0.0343439057469368,
      "learning_rate": 4.348125861852827e-05,
      "loss": 0.1628,
      "step": 20800
    },
    {
      "epoch": 1.3100162968534537,
      "grad_norm": 1.1754486560821533,
      "learning_rate": 4.3449918515732735e-05,
      "loss": 0.1332,
      "step": 20900
    },
    {
      "epoch": 1.3162843174125611,
      "grad_norm": 0.0784272775053978,
      "learning_rate": 4.341857841293719e-05,
      "loss": 0.1358,
      "step": 21000
    },
    {
      "epoch": 1.3225523379716686,
      "grad_norm": 5.860198974609375,
      "learning_rate": 4.338723831014166e-05,
      "loss": 0.1316,
      "step": 21100
    },
    {
      "epoch": 1.328820358530776,
      "grad_norm": 0.04818045720458031,
      "learning_rate": 4.335589820734612e-05,
      "loss": 0.1339,
      "step": 21200
    },
    {
      "epoch": 1.3350883790898833,
      "grad_norm": 0.10664447396993637,
      "learning_rate": 4.3324558104550585e-05,
      "loss": 0.1532,
      "step": 21300
    },
    {
      "epoch": 1.3413563996489908,
      "grad_norm": 0.01120070368051529,
      "learning_rate": 4.329321800175505e-05,
      "loss": 0.1361,
      "step": 21400
    },
    {
      "epoch": 1.3476244202080983,
      "grad_norm": 0.04355592280626297,
      "learning_rate": 4.326187789895951e-05,
      "loss": 0.1589,
      "step": 21500
    },
    {
      "epoch": 1.3538924407672057,
      "grad_norm": 0.29224634170532227,
      "learning_rate": 4.323053779616397e-05,
      "loss": 0.1829,
      "step": 21600
    },
    {
      "epoch": 1.3601604613263132,
      "grad_norm": 7.532464981079102,
      "learning_rate": 4.3199197693368434e-05,
      "loss": 0.1457,
      "step": 21700
    },
    {
      "epoch": 1.3664284818854207,
      "grad_norm": 0.05146549642086029,
      "learning_rate": 4.31678575905729e-05,
      "loss": 0.1832,
      "step": 21800
    },
    {
      "epoch": 1.3726965024445281,
      "grad_norm": 2.5926992893218994,
      "learning_rate": 4.313651748777736e-05,
      "loss": 0.1241,
      "step": 21900
    },
    {
      "epoch": 1.3789645230036354,
      "grad_norm": 0.023670606315135956,
      "learning_rate": 4.310517738498183e-05,
      "loss": 0.1094,
      "step": 22000
    },
    {
      "epoch": 1.3852325435627428,
      "grad_norm": 0.016848748549818993,
      "learning_rate": 4.3073837282186284e-05,
      "loss": 0.153,
      "step": 22100
    },
    {
      "epoch": 1.3915005641218503,
      "grad_norm": 0.018959341570734978,
      "learning_rate": 4.304249717939075e-05,
      "loss": 0.1875,
      "step": 22200
    },
    {
      "epoch": 1.3977685846809578,
      "grad_norm": 4.0403876304626465,
      "learning_rate": 4.301115707659521e-05,
      "loss": 0.1089,
      "step": 22300
    },
    {
      "epoch": 1.4040366052400652,
      "grad_norm": 16.69829750061035,
      "learning_rate": 4.2979816973799676e-05,
      "loss": 0.1504,
      "step": 22400
    },
    {
      "epoch": 1.4103046257991725,
      "grad_norm": 11.63906478881836,
      "learning_rate": 4.294847687100414e-05,
      "loss": 0.1297,
      "step": 22500
    },
    {
      "epoch": 1.4165726463582802,
      "grad_norm": 0.058484334498643875,
      "learning_rate": 4.2917136768208605e-05,
      "loss": 0.1609,
      "step": 22600
    },
    {
      "epoch": 1.4228406669173874,
      "grad_norm": 0.10660742968320847,
      "learning_rate": 4.288579666541306e-05,
      "loss": 0.117,
      "step": 22700
    },
    {
      "epoch": 1.4291086874764949,
      "grad_norm": 0.8750506639480591,
      "learning_rate": 4.2854456562617526e-05,
      "loss": 0.2154,
      "step": 22800
    },
    {
      "epoch": 1.4353767080356024,
      "grad_norm": 0.01947125606238842,
      "learning_rate": 4.282311645982199e-05,
      "loss": 0.1373,
      "step": 22900
    },
    {
      "epoch": 1.4416447285947098,
      "grad_norm": 7.973813533782959,
      "learning_rate": 4.2791776357026454e-05,
      "loss": 0.1175,
      "step": 23000
    },
    {
      "epoch": 1.4479127491538173,
      "grad_norm": 0.2649206221103668,
      "learning_rate": 4.276043625423092e-05,
      "loss": 0.1472,
      "step": 23100
    },
    {
      "epoch": 1.4541807697129245,
      "grad_norm": 0.06597866863012314,
      "learning_rate": 4.2729096151435376e-05,
      "loss": 0.1212,
      "step": 23200
    },
    {
      "epoch": 1.4604487902720322,
      "grad_norm": 0.06586948037147522,
      "learning_rate": 4.269775604863984e-05,
      "loss": 0.1713,
      "step": 23300
    },
    {
      "epoch": 1.4667168108311395,
      "grad_norm": 0.009352350607514381,
      "learning_rate": 4.2666415945844304e-05,
      "loss": 0.1581,
      "step": 23400
    },
    {
      "epoch": 1.472984831390247,
      "grad_norm": 4.44644832611084,
      "learning_rate": 4.263507584304877e-05,
      "loss": 0.1527,
      "step": 23500
    },
    {
      "epoch": 1.4792528519493544,
      "grad_norm": 0.09516802430152893,
      "learning_rate": 4.260373574025323e-05,
      "loss": 0.1353,
      "step": 23600
    },
    {
      "epoch": 1.4855208725084619,
      "grad_norm": 0.0312493946403265,
      "learning_rate": 4.257239563745769e-05,
      "loss": 0.1086,
      "step": 23700
    },
    {
      "epoch": 1.4917888930675693,
      "grad_norm": 0.153848335146904,
      "learning_rate": 4.2541055534662153e-05,
      "loss": 0.13,
      "step": 23800
    },
    {
      "epoch": 1.4980569136266766,
      "grad_norm": 0.05382615327835083,
      "learning_rate": 4.250971543186662e-05,
      "loss": 0.13,
      "step": 23900
    },
    {
      "epoch": 1.5043249341857843,
      "grad_norm": 3.612338066101074,
      "learning_rate": 4.247837532907108e-05,
      "loss": 0.0916,
      "step": 24000
    },
    {
      "epoch": 1.5105929547448915,
      "grad_norm": 0.16405566036701202,
      "learning_rate": 4.2447035226275546e-05,
      "loss": 0.1233,
      "step": 24100
    },
    {
      "epoch": 1.516860975303999,
      "grad_norm": 0.043971579521894455,
      "learning_rate": 4.241569512348001e-05,
      "loss": 0.1144,
      "step": 24200
    },
    {
      "epoch": 1.5231289958631065,
      "grad_norm": 0.7001562118530273,
      "learning_rate": 4.238435502068447e-05,
      "loss": 0.1285,
      "step": 24300
    },
    {
      "epoch": 1.5293970164222137,
      "grad_norm": 4.435388088226318,
      "learning_rate": 4.235301491788893e-05,
      "loss": 0.1002,
      "step": 24400
    },
    {
      "epoch": 1.5356650369813214,
      "grad_norm": 0.031049372628331184,
      "learning_rate": 4.2321674815093395e-05,
      "loss": 0.1373,
      "step": 24500
    },
    {
      "epoch": 1.5419330575404286,
      "grad_norm": 2.7801127433776855,
      "learning_rate": 4.229033471229786e-05,
      "loss": 0.1551,
      "step": 24600
    },
    {
      "epoch": 1.5482010780995363,
      "grad_norm": 0.07833578437566757,
      "learning_rate": 4.2258994609502324e-05,
      "loss": 0.1379,
      "step": 24700
    },
    {
      "epoch": 1.5544690986586436,
      "grad_norm": 0.38435450196266174,
      "learning_rate": 4.222765450670678e-05,
      "loss": 0.1436,
      "step": 24800
    },
    {
      "epoch": 1.560737119217751,
      "grad_norm": 0.0331881120800972,
      "learning_rate": 4.2196314403911245e-05,
      "loss": 0.1299,
      "step": 24900
    },
    {
      "epoch": 1.5670051397768585,
      "grad_norm": 0.05836120992898941,
      "learning_rate": 4.216497430111571e-05,
      "loss": 0.0855,
      "step": 25000
    },
    {
      "epoch": 1.5732731603359658,
      "grad_norm": 0.09680526703596115,
      "learning_rate": 4.213363419832017e-05,
      "loss": 0.1713,
      "step": 25100
    },
    {
      "epoch": 1.5795411808950734,
      "grad_norm": 6.43214750289917,
      "learning_rate": 4.210229409552464e-05,
      "loss": 0.084,
      "step": 25200
    },
    {
      "epoch": 1.5858092014541807,
      "grad_norm": 18.783109664916992,
      "learning_rate": 4.20709539927291e-05,
      "loss": 0.1622,
      "step": 25300
    },
    {
      "epoch": 1.5920772220132882,
      "grad_norm": 16.250749588012695,
      "learning_rate": 4.203961388993356e-05,
      "loss": 0.1102,
      "step": 25400
    },
    {
      "epoch": 1.5983452425723956,
      "grad_norm": 3.157179117202759,
      "learning_rate": 4.200827378713802e-05,
      "loss": 0.1162,
      "step": 25500
    },
    {
      "epoch": 1.604613263131503,
      "grad_norm": 3.886101245880127,
      "learning_rate": 4.197693368434249e-05,
      "loss": 0.0922,
      "step": 25600
    },
    {
      "epoch": 1.6108812836906106,
      "grad_norm": 0.02459394559264183,
      "learning_rate": 4.194559358154695e-05,
      "loss": 0.1432,
      "step": 25700
    },
    {
      "epoch": 1.6171493042497178,
      "grad_norm": 5.012412071228027,
      "learning_rate": 4.1914253478751415e-05,
      "loss": 0.1231,
      "step": 25800
    },
    {
      "epoch": 1.6234173248088255,
      "grad_norm": 0.9605007171630859,
      "learning_rate": 4.188291337595587e-05,
      "loss": 0.1137,
      "step": 25900
    },
    {
      "epoch": 1.6296853453679327,
      "grad_norm": 0.05215989425778389,
      "learning_rate": 4.1851573273160337e-05,
      "loss": 0.1471,
      "step": 26000
    },
    {
      "epoch": 1.6359533659270402,
      "grad_norm": 0.27536773681640625,
      "learning_rate": 4.18202331703648e-05,
      "loss": 0.1517,
      "step": 26100
    },
    {
      "epoch": 1.6422213864861477,
      "grad_norm": 0.015393884852528572,
      "learning_rate": 4.1788893067569265e-05,
      "loss": 0.1523,
      "step": 26200
    },
    {
      "epoch": 1.6484894070452552,
      "grad_norm": 0.15863627195358276,
      "learning_rate": 4.175755296477373e-05,
      "loss": 0.1426,
      "step": 26300
    },
    {
      "epoch": 1.6547574276043626,
      "grad_norm": 3.170154571533203,
      "learning_rate": 4.1726212861978186e-05,
      "loss": 0.1801,
      "step": 26400
    },
    {
      "epoch": 1.6610254481634699,
      "grad_norm": 0.04839281737804413,
      "learning_rate": 4.169487275918265e-05,
      "loss": 0.119,
      "step": 26500
    },
    {
      "epoch": 1.6672934687225776,
      "grad_norm": 0.04801773279905319,
      "learning_rate": 4.1663532656387114e-05,
      "loss": 0.1377,
      "step": 26600
    },
    {
      "epoch": 1.6735614892816848,
      "grad_norm": 0.031017405912280083,
      "learning_rate": 4.163219255359158e-05,
      "loss": 0.1264,
      "step": 26700
    },
    {
      "epoch": 1.6798295098407923,
      "grad_norm": 0.05843241140246391,
      "learning_rate": 4.160085245079604e-05,
      "loss": 0.1027,
      "step": 26800
    },
    {
      "epoch": 1.6860975303998997,
      "grad_norm": 0.020908353850245476,
      "learning_rate": 4.156951234800051e-05,
      "loss": 0.1489,
      "step": 26900
    },
    {
      "epoch": 1.692365550959007,
      "grad_norm": 0.36993488669395447,
      "learning_rate": 4.1538172245204964e-05,
      "loss": 0.1221,
      "step": 27000
    },
    {
      "epoch": 1.6986335715181147,
      "grad_norm": 0.009281585924327374,
      "learning_rate": 4.150683214240943e-05,
      "loss": 0.1226,
      "step": 27100
    },
    {
      "epoch": 1.704901592077222,
      "grad_norm": 0.9608448147773743,
      "learning_rate": 4.147549203961389e-05,
      "loss": 0.1278,
      "step": 27200
    },
    {
      "epoch": 1.7111696126363296,
      "grad_norm": 3.3816165924072266,
      "learning_rate": 4.1444151936818356e-05,
      "loss": 0.1606,
      "step": 27300
    },
    {
      "epoch": 1.7174376331954369,
      "grad_norm": 0.035224977880716324,
      "learning_rate": 4.141281183402282e-05,
      "loss": 0.0929,
      "step": 27400
    },
    {
      "epoch": 1.7237056537545443,
      "grad_norm": 0.030741138383746147,
      "learning_rate": 4.138147173122728e-05,
      "loss": 0.1115,
      "step": 27500
    },
    {
      "epoch": 1.7299736743136518,
      "grad_norm": 0.7804316878318787,
      "learning_rate": 4.135013162843174e-05,
      "loss": 0.1099,
      "step": 27600
    },
    {
      "epoch": 1.736241694872759,
      "grad_norm": 12.860640525817871,
      "learning_rate": 4.1318791525636206e-05,
      "loss": 0.1177,
      "step": 27700
    },
    {
      "epoch": 1.7425097154318667,
      "grad_norm": 0.01669987104833126,
      "learning_rate": 4.128745142284067e-05,
      "loss": 0.1323,
      "step": 27800
    },
    {
      "epoch": 1.748777735990974,
      "grad_norm": 0.06566524505615234,
      "learning_rate": 4.1256111320045134e-05,
      "loss": 0.1587,
      "step": 27900
    },
    {
      "epoch": 1.7550457565500814,
      "grad_norm": 0.0271492600440979,
      "learning_rate": 4.12247712172496e-05,
      "loss": 0.0951,
      "step": 28000
    },
    {
      "epoch": 1.761313777109189,
      "grad_norm": 0.26668888330459595,
      "learning_rate": 4.1193431114454056e-05,
      "loss": 0.1226,
      "step": 28100
    },
    {
      "epoch": 1.7675817976682964,
      "grad_norm": 2.946155309677124,
      "learning_rate": 4.116209101165852e-05,
      "loss": 0.1123,
      "step": 28200
    },
    {
      "epoch": 1.7738498182274038,
      "grad_norm": 0.024197395890951157,
      "learning_rate": 4.1130750908862984e-05,
      "loss": 0.1279,
      "step": 28300
    },
    {
      "epoch": 1.780117838786511,
      "grad_norm": 0.032337360084056854,
      "learning_rate": 4.109941080606745e-05,
      "loss": 0.1505,
      "step": 28400
    },
    {
      "epoch": 1.7863858593456188,
      "grad_norm": 0.019564684480428696,
      "learning_rate": 4.106807070327191e-05,
      "loss": 0.104,
      "step": 28500
    },
    {
      "epoch": 1.792653879904726,
      "grad_norm": 0.028655577450990677,
      "learning_rate": 4.103673060047637e-05,
      "loss": 0.1017,
      "step": 28600
    },
    {
      "epoch": 1.7989219004638335,
      "grad_norm": 10.458086967468262,
      "learning_rate": 4.1005390497680833e-05,
      "loss": 0.1603,
      "step": 28700
    },
    {
      "epoch": 1.805189921022941,
      "grad_norm": 1.0219428539276123,
      "learning_rate": 4.09740503948853e-05,
      "loss": 0.1287,
      "step": 28800
    },
    {
      "epoch": 1.8114579415820484,
      "grad_norm": 3.7257254123687744,
      "learning_rate": 4.094271029208976e-05,
      "loss": 0.1308,
      "step": 28900
    },
    {
      "epoch": 1.817725962141156,
      "grad_norm": 0.07095759361982346,
      "learning_rate": 4.0911370189294226e-05,
      "loss": 0.1693,
      "step": 29000
    },
    {
      "epoch": 1.8239939827002631,
      "grad_norm": 0.07273363322019577,
      "learning_rate": 4.088003008649868e-05,
      "loss": 0.1173,
      "step": 29100
    },
    {
      "epoch": 1.8302620032593708,
      "grad_norm": 0.016207663342356682,
      "learning_rate": 4.084868998370315e-05,
      "loss": 0.0757,
      "step": 29200
    },
    {
      "epoch": 1.836530023818478,
      "grad_norm": 8.10350227355957,
      "learning_rate": 4.081734988090761e-05,
      "loss": 0.1705,
      "step": 29300
    },
    {
      "epoch": 1.8427980443775855,
      "grad_norm": 0.020618712529540062,
      "learning_rate": 4.0786009778112075e-05,
      "loss": 0.1505,
      "step": 29400
    },
    {
      "epoch": 1.849066064936693,
      "grad_norm": 0.01778154820203781,
      "learning_rate": 4.075466967531654e-05,
      "loss": 0.0881,
      "step": 29500
    },
    {
      "epoch": 1.8553340854958005,
      "grad_norm": 0.3636402487754822,
      "learning_rate": 4.0723329572521004e-05,
      "loss": 0.124,
      "step": 29600
    },
    {
      "epoch": 1.861602106054908,
      "grad_norm": 0.053270913660526276,
      "learning_rate": 4.069198946972546e-05,
      "loss": 0.1033,
      "step": 29700
    },
    {
      "epoch": 1.8678701266140152,
      "grad_norm": 0.14825250208377838,
      "learning_rate": 4.0660649366929925e-05,
      "loss": 0.1336,
      "step": 29800
    },
    {
      "epoch": 1.8741381471731229,
      "grad_norm": 0.5773608684539795,
      "learning_rate": 4.062930926413439e-05,
      "loss": 0.1577,
      "step": 29900
    },
    {
      "epoch": 1.8804061677322301,
      "grad_norm": 0.04096854850649834,
      "learning_rate": 4.059796916133885e-05,
      "loss": 0.1389,
      "step": 30000
    },
    {
      "epoch": 1.8866741882913376,
      "grad_norm": 0.1584090292453766,
      "learning_rate": 4.056662905854332e-05,
      "loss": 0.1156,
      "step": 30100
    },
    {
      "epoch": 1.892942208850445,
      "grad_norm": 0.025279894471168518,
      "learning_rate": 4.0535288955747775e-05,
      "loss": 0.1698,
      "step": 30200
    },
    {
      "epoch": 1.8992102294095523,
      "grad_norm": 0.01919800415635109,
      "learning_rate": 4.050394885295224e-05,
      "loss": 0.1191,
      "step": 30300
    },
    {
      "epoch": 1.90547824996866,
      "grad_norm": 0.6554661989212036,
      "learning_rate": 4.04726087501567e-05,
      "loss": 0.1015,
      "step": 30400
    },
    {
      "epoch": 1.9117462705277672,
      "grad_norm": 10.302351951599121,
      "learning_rate": 4.044126864736116e-05,
      "loss": 0.1468,
      "step": 30500
    },
    {
      "epoch": 1.9180142910868747,
      "grad_norm": 0.019414065405726433,
      "learning_rate": 4.040992854456563e-05,
      "loss": 0.1338,
      "step": 30600
    },
    {
      "epoch": 1.9242823116459822,
      "grad_norm": 0.8314793705940247,
      "learning_rate": 4.0378588441770095e-05,
      "loss": 0.1011,
      "step": 30700
    },
    {
      "epoch": 1.9305503322050896,
      "grad_norm": 6.409970283508301,
      "learning_rate": 4.034724833897455e-05,
      "loss": 0.1082,
      "step": 30800
    },
    {
      "epoch": 1.9368183527641971,
      "grad_norm": 0.019885946065187454,
      "learning_rate": 4.0315908236179017e-05,
      "loss": 0.1341,
      "step": 30900
    },
    {
      "epoch": 1.9430863733233044,
      "grad_norm": 19.638233184814453,
      "learning_rate": 4.0284568133383474e-05,
      "loss": 0.1709,
      "step": 31000
    },
    {
      "epoch": 1.949354393882412,
      "grad_norm": 0.017337223514914513,
      "learning_rate": 4.0253228030587945e-05,
      "loss": 0.1019,
      "step": 31100
    },
    {
      "epoch": 1.9556224144415193,
      "grad_norm": 0.090455062687397,
      "learning_rate": 4.022188792779241e-05,
      "loss": 0.1393,
      "step": 31200
    },
    {
      "epoch": 1.9618904350006268,
      "grad_norm": 0.04115147516131401,
      "learning_rate": 4.0190547824996866e-05,
      "loss": 0.1956,
      "step": 31300
    },
    {
      "epoch": 1.9681584555597342,
      "grad_norm": 0.009835952892899513,
      "learning_rate": 4.015920772220133e-05,
      "loss": 0.0807,
      "step": 31400
    },
    {
      "epoch": 1.9744264761188417,
      "grad_norm": 24.049352645874023,
      "learning_rate": 4.0127867619405794e-05,
      "loss": 0.1197,
      "step": 31500
    },
    {
      "epoch": 1.9806944966779492,
      "grad_norm": 0.025476396083831787,
      "learning_rate": 4.009652751661025e-05,
      "loss": 0.1464,
      "step": 31600
    },
    {
      "epoch": 1.9869625172370564,
      "grad_norm": 0.023265615105628967,
      "learning_rate": 4.006518741381472e-05,
      "loss": 0.1475,
      "step": 31700
    },
    {
      "epoch": 1.993230537796164,
      "grad_norm": 0.005244415253400803,
      "learning_rate": 4.003384731101919e-05,
      "loss": 0.1118,
      "step": 31800
    },
    {
      "epoch": 1.9994985583552713,
      "grad_norm": 0.0228890310972929,
      "learning_rate": 4.0002507208223644e-05,
      "loss": 0.1351,
      "step": 31900
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.9593731449578206,
      "eval_loss": 0.2004200518131256,
      "eval_runtime": 46.5064,
      "eval_samples_per_second": 304.926,
      "eval_steps_per_second": 38.124,
      "step": 31908
    },
    {
      "epoch": 2.005766578914379,
      "grad_norm": 1.752765417098999,
      "learning_rate": 3.997116710542811e-05,
      "loss": 0.095,
      "step": 32000
    },
    {
      "epoch": 2.0120345994734863,
      "grad_norm": 0.015561070293188095,
      "learning_rate": 3.9939827002632565e-05,
      "loss": 0.0528,
      "step": 32100
    },
    {
      "epoch": 2.0183026200325935,
      "grad_norm": 0.008636975660920143,
      "learning_rate": 3.9908486899837036e-05,
      "loss": 0.0472,
      "step": 32200
    },
    {
      "epoch": 2.024570640591701,
      "grad_norm": 0.039465419948101044,
      "learning_rate": 3.98771467970415e-05,
      "loss": 0.0584,
      "step": 32300
    },
    {
      "epoch": 2.0308386611508085,
      "grad_norm": 0.01176067441701889,
      "learning_rate": 3.984580669424596e-05,
      "loss": 0.0716,
      "step": 32400
    },
    {
      "epoch": 2.037106681709916,
      "grad_norm": 0.01852802000939846,
      "learning_rate": 3.981446659145042e-05,
      "loss": 0.0746,
      "step": 32500
    },
    {
      "epoch": 2.0433747022690234,
      "grad_norm": 0.11856148391962051,
      "learning_rate": 3.9783126488654886e-05,
      "loss": 0.0442,
      "step": 32600
    },
    {
      "epoch": 2.049642722828131,
      "grad_norm": 0.005875643342733383,
      "learning_rate": 3.975178638585934e-05,
      "loss": 0.1037,
      "step": 32700
    },
    {
      "epoch": 2.0559107433872383,
      "grad_norm": 0.8473520874977112,
      "learning_rate": 3.9720446283063814e-05,
      "loss": 0.0918,
      "step": 32800
    },
    {
      "epoch": 2.0621787639463456,
      "grad_norm": 0.027008237317204475,
      "learning_rate": 3.968910618026827e-05,
      "loss": 0.1079,
      "step": 32900
    },
    {
      "epoch": 2.0684467845054533,
      "grad_norm": 0.016751717776060104,
      "learning_rate": 3.9657766077472736e-05,
      "loss": 0.0849,
      "step": 33000
    },
    {
      "epoch": 2.0747148050645605,
      "grad_norm": 4.002385139465332,
      "learning_rate": 3.96264259746772e-05,
      "loss": 0.0566,
      "step": 33100
    },
    {
      "epoch": 2.080982825623668,
      "grad_norm": 0.008037238381803036,
      "learning_rate": 3.959508587188166e-05,
      "loss": 0.0456,
      "step": 33200
    },
    {
      "epoch": 2.0872508461827755,
      "grad_norm": 0.6050384640693665,
      "learning_rate": 3.956374576908613e-05,
      "loss": 0.0381,
      "step": 33300
| "step": 33300 | |
| }, | |
| { | |
| "epoch": 2.0935188667418827, | |
| "grad_norm": 0.024053404107689857, | |
| "learning_rate": 3.953240566629059e-05, | |
| "loss": 0.0905, | |
| "step": 33400 | |
| }, | |
| { | |
| "epoch": 2.0997868873009904, | |
| "grad_norm": 0.007491984870284796, | |
| "learning_rate": 3.950106556349505e-05, | |
| "loss": 0.0497, | |
| "step": 33500 | |
| }, | |
| { | |
| "epoch": 2.1060549078600976, | |
| "grad_norm": 18.2802734375, | |
| "learning_rate": 3.946972546069951e-05, | |
| "loss": 0.0846, | |
| "step": 33600 | |
| }, | |
| { | |
| "epoch": 2.1123229284192053, | |
| "grad_norm": 0.007532674353569746, | |
| "learning_rate": 3.943838535790397e-05, | |
| "loss": 0.0959, | |
| "step": 33700 | |
| }, | |
| { | |
| "epoch": 2.1185909489783126, | |
| "grad_norm": 0.030742375180125237, | |
| "learning_rate": 3.9407045255108435e-05, | |
| "loss": 0.0926, | |
| "step": 33800 | |
| }, | |
| { | |
| "epoch": 2.1248589695374203, | |
| "grad_norm": 0.017789393663406372, | |
| "learning_rate": 3.9375705152312906e-05, | |
| "loss": 0.0713, | |
| "step": 33900 | |
| }, | |
| { | |
| "epoch": 2.1311269900965275, | |
| "grad_norm": 0.013687222264707088, | |
| "learning_rate": 3.934436504951736e-05, | |
| "loss": 0.0683, | |
| "step": 34000 | |
| }, | |
| { | |
| "epoch": 2.1373950106556348, | |
| "grad_norm": 0.04924164339900017, | |
| "learning_rate": 3.931302494672183e-05, | |
| "loss": 0.0751, | |
| "step": 34100 | |
| }, | |
| { | |
| "epoch": 2.1436630312147424, | |
| "grad_norm": 0.08258863538503647, | |
| "learning_rate": 3.928168484392629e-05, | |
| "loss": 0.0641, | |
| "step": 34200 | |
| }, | |
| { | |
| "epoch": 2.1499310517738497, | |
| "grad_norm": 0.0224623903632164, | |
| "learning_rate": 3.925034474113075e-05, | |
| "loss": 0.1167, | |
| "step": 34300 | |
| }, | |
| { | |
| "epoch": 2.1561990723329574, | |
| "grad_norm": 0.0296242143958807, | |
| "learning_rate": 3.921900463833522e-05, | |
| "loss": 0.0796, | |
| "step": 34400 | |
| }, | |
| { | |
| "epoch": 2.1624670928920646, | |
| "grad_norm": 0.062432464212179184, | |
| "learning_rate": 3.9187664535539683e-05, | |
| "loss": 0.0708, | |
| "step": 34500 | |
| }, | |
| { | |
| "epoch": 2.1687351134511723, | |
| "grad_norm": 0.004165360238403082, | |
| "learning_rate": 3.915632443274414e-05, | |
| "loss": 0.0653, | |
| "step": 34600 | |
| }, | |
| { | |
| "epoch": 2.1750031340102796, | |
| "grad_norm": 0.004024056252092123, | |
| "learning_rate": 3.9124984329948605e-05, | |
| "loss": 0.0702, | |
| "step": 34700 | |
| }, | |
| { | |
| "epoch": 2.181271154569387, | |
| "grad_norm": 0.019855454564094543, | |
| "learning_rate": 3.909364422715306e-05, | |
| "loss": 0.0693, | |
| "step": 34800 | |
| }, | |
| { | |
| "epoch": 2.1875391751284945, | |
| "grad_norm": 0.05669284239411354, | |
| "learning_rate": 3.9062304124357526e-05, | |
| "loss": 0.0853, | |
| "step": 34900 | |
| }, | |
| { | |
| "epoch": 2.1938071956876017, | |
| "grad_norm": 0.0027710208669304848, | |
| "learning_rate": 3.9030964021562e-05, | |
| "loss": 0.0746, | |
| "step": 35000 | |
| }, | |
| { | |
| "epoch": 2.2000752162467094, | |
| "grad_norm": 0.018983645364642143, | |
| "learning_rate": 3.8999623918766455e-05, | |
| "loss": 0.1337, | |
| "step": 35100 | |
| }, | |
| { | |
| "epoch": 2.2063432368058167, | |
| "grad_norm": 0.030805960297584534, | |
| "learning_rate": 3.896828381597092e-05, | |
| "loss": 0.1038, | |
| "step": 35200 | |
| }, | |
| { | |
| "epoch": 2.212611257364924, | |
| "grad_norm": 0.01616552099585533, | |
| "learning_rate": 3.893694371317538e-05, | |
| "loss": 0.1092, | |
| "step": 35300 | |
| }, | |
| { | |
| "epoch": 2.2188792779240316, | |
| "grad_norm": 0.022278735414147377, | |
| "learning_rate": 3.890560361037984e-05, | |
| "loss": 0.0506, | |
| "step": 35400 | |
| }, | |
| { | |
| "epoch": 2.225147298483139, | |
| "grad_norm": 0.014077126048505306, | |
| "learning_rate": 3.887426350758431e-05, | |
| "loss": 0.0601, | |
| "step": 35500 | |
| }, | |
| { | |
| "epoch": 2.2314153190422465, | |
| "grad_norm": 0.006781439762562513, | |
| "learning_rate": 3.884292340478877e-05, | |
| "loss": 0.0535, | |
| "step": 35600 | |
| }, | |
| { | |
| "epoch": 2.237683339601354, | |
| "grad_norm": 0.030124608427286148, | |
| "learning_rate": 3.881158330199323e-05, | |
| "loss": 0.0789, | |
| "step": 35700 | |
| }, | |
| { | |
| "epoch": 2.2439513601604615, | |
| "grad_norm": 0.07032699137926102, | |
| "learning_rate": 3.8780243199197696e-05, | |
| "loss": 0.0812, | |
| "step": 35800 | |
| }, | |
| { | |
| "epoch": 2.2502193807195687, | |
| "grad_norm": 0.03821179270744324, | |
| "learning_rate": 3.8748903096402154e-05, | |
| "loss": 0.0912, | |
| "step": 35900 | |
| }, | |
| { | |
| "epoch": 2.256487401278676, | |
| "grad_norm": 27.029489517211914, | |
| "learning_rate": 3.871756299360662e-05, | |
| "loss": 0.0908, | |
| "step": 36000 | |
| }, | |
| { | |
| "epoch": 2.2627554218377837, | |
| "grad_norm": 0.05968330428004265, | |
| "learning_rate": 3.868622289081109e-05, | |
| "loss": 0.0867, | |
| "step": 36100 | |
| }, | |
| { | |
| "epoch": 2.269023442396891, | |
| "grad_norm": 0.05663014203310013, | |
| "learning_rate": 3.8654882788015546e-05, | |
| "loss": 0.1178, | |
| "step": 36200 | |
| }, | |
| { | |
| "epoch": 2.2752914629559986, | |
| "grad_norm": 0.04175253212451935, | |
| "learning_rate": 3.862354268522001e-05, | |
| "loss": 0.0605, | |
| "step": 36300 | |
| }, | |
| { | |
| "epoch": 2.281559483515106, | |
| "grad_norm": 0.030418751761317253, | |
| "learning_rate": 3.8592202582424474e-05, | |
| "loss": 0.1176, | |
| "step": 36400 | |
| }, | |
| { | |
| "epoch": 2.2878275040742135, | |
| "grad_norm": 0.013122400268912315, | |
| "learning_rate": 3.856086247962893e-05, | |
| "loss": 0.0679, | |
| "step": 36500 | |
| }, | |
| { | |
| "epoch": 2.294095524633321, | |
| "grad_norm": 0.009739202447235584, | |
| "learning_rate": 3.8529522376833396e-05, | |
| "loss": 0.0451, | |
| "step": 36600 | |
| }, | |
| { | |
| "epoch": 2.300363545192428, | |
| "grad_norm": 0.11405142396688461, | |
| "learning_rate": 3.849818227403786e-05, | |
| "loss": 0.0814, | |
| "step": 36700 | |
| }, | |
| { | |
| "epoch": 2.3066315657515357, | |
| "grad_norm": 0.22820931673049927, | |
| "learning_rate": 3.8466842171242324e-05, | |
| "loss": 0.0756, | |
| "step": 36800 | |
| }, | |
| { | |
| "epoch": 2.312899586310643, | |
| "grad_norm": 0.051908042281866074, | |
| "learning_rate": 3.843550206844679e-05, | |
| "loss": 0.0892, | |
| "step": 36900 | |
| }, | |
| { | |
| "epoch": 2.3191676068697507, | |
| "grad_norm": 0.0027861667331308126, | |
| "learning_rate": 3.8404161965651245e-05, | |
| "loss": 0.0343, | |
| "step": 37000 | |
| }, | |
| { | |
| "epoch": 2.325435627428858, | |
| "grad_norm": 0.5086230635643005, | |
| "learning_rate": 3.837282186285571e-05, | |
| "loss": 0.0626, | |
| "step": 37100 | |
| }, | |
| { | |
| "epoch": 2.3317036479879656, | |
| "grad_norm": 15.515472412109375, | |
| "learning_rate": 3.834148176006018e-05, | |
| "loss": 0.0835, | |
| "step": 37200 | |
| }, | |
| { | |
| "epoch": 2.337971668547073, | |
| "grad_norm": 19.288026809692383, | |
| "learning_rate": 3.831014165726464e-05, | |
| "loss": 0.0599, | |
| "step": 37300 | |
| }, | |
| { | |
| "epoch": 2.34423968910618, | |
| "grad_norm": 0.016877690330147743, | |
| "learning_rate": 3.82788015544691e-05, | |
| "loss": 0.0452, | |
| "step": 37400 | |
| }, | |
| { | |
| "epoch": 2.3505077096652878, | |
| "grad_norm": 0.017148835584521294, | |
| "learning_rate": 3.824746145167356e-05, | |
| "loss": 0.0469, | |
| "step": 37500 | |
| }, | |
| { | |
| "epoch": 2.356775730224395, | |
| "grad_norm": 0.027410952374339104, | |
| "learning_rate": 3.821612134887802e-05, | |
| "loss": 0.0925, | |
| "step": 37600 | |
| }, | |
| { | |
| "epoch": 2.3630437507835027, | |
| "grad_norm": 0.0017053181072697043, | |
| "learning_rate": 3.818478124608249e-05, | |
| "loss": 0.0563, | |
| "step": 37700 | |
| }, | |
| { | |
| "epoch": 2.36931177134261, | |
| "grad_norm": 3.3930346965789795, | |
| "learning_rate": 3.815344114328695e-05, | |
| "loss": 0.081, | |
| "step": 37800 | |
| }, | |
| { | |
| "epoch": 2.3755797919017176, | |
| "grad_norm": 2.7775776386260986, | |
| "learning_rate": 3.8122101040491415e-05, | |
| "loss": 0.0987, | |
| "step": 37900 | |
| }, | |
| { | |
| "epoch": 2.381847812460825, | |
| "grad_norm": 40.191810607910156, | |
| "learning_rate": 3.809076093769588e-05, | |
| "loss": 0.0573, | |
| "step": 38000 | |
| }, | |
| { | |
| "epoch": 2.388115833019932, | |
| "grad_norm": 0.00726683996617794, | |
| "learning_rate": 3.805942083490034e-05, | |
| "loss": 0.0849, | |
| "step": 38100 | |
| }, | |
| { | |
| "epoch": 2.39438385357904, | |
| "grad_norm": 0.0060453517362475395, | |
| "learning_rate": 3.80280807321048e-05, | |
| "loss": 0.0571, | |
| "step": 38200 | |
| }, | |
| { | |
| "epoch": 2.400651874138147, | |
| "grad_norm": 0.010360270738601685, | |
| "learning_rate": 3.799674062930927e-05, | |
| "loss": 0.1031, | |
| "step": 38300 | |
| }, | |
| { | |
| "epoch": 2.4069198946972548, | |
| "grad_norm": 0.09773801267147064, | |
| "learning_rate": 3.796540052651373e-05, | |
| "loss": 0.0702, | |
| "step": 38400 | |
| }, | |
| { | |
| "epoch": 2.413187915256362, | |
| "grad_norm": 0.03975152224302292, | |
| "learning_rate": 3.793406042371819e-05, | |
| "loss": 0.0907, | |
| "step": 38500 | |
| }, | |
| { | |
| "epoch": 2.4194559358154697, | |
| "grad_norm": 2.2103185653686523, | |
| "learning_rate": 3.790272032092265e-05, | |
| "loss": 0.0747, | |
| "step": 38600 | |
| }, | |
| { | |
| "epoch": 2.425723956374577, | |
| "grad_norm": 0.008638462983071804, | |
| "learning_rate": 3.7871380218127115e-05, | |
| "loss": 0.0699, | |
| "step": 38700 | |
| }, | |
| { | |
| "epoch": 2.431991976933684, | |
| "grad_norm": 0.027898641303181648, | |
| "learning_rate": 3.784004011533158e-05, | |
| "loss": 0.0952, | |
| "step": 38800 | |
| }, | |
| { | |
| "epoch": 2.438259997492792, | |
| "grad_norm": 0.015264123678207397, | |
| "learning_rate": 3.780870001253604e-05, | |
| "loss": 0.0529, | |
| "step": 38900 | |
| }, | |
| { | |
| "epoch": 2.444528018051899, | |
| "grad_norm": 0.23222191631793976, | |
| "learning_rate": 3.777735990974051e-05, | |
| "loss": 0.0952, | |
| "step": 39000 | |
| }, | |
| { | |
| "epoch": 2.450796038611007, | |
| "grad_norm": 0.02441653236746788, | |
| "learning_rate": 3.774601980694497e-05, | |
| "loss": 0.099, | |
| "step": 39100 | |
| }, | |
| { | |
| "epoch": 2.457064059170114, | |
| "grad_norm": 0.024442043155431747, | |
| "learning_rate": 3.771467970414943e-05, | |
| "loss": 0.0749, | |
| "step": 39200 | |
| }, | |
| { | |
| "epoch": 2.4633320797292217, | |
| "grad_norm": 0.09841648489236832, | |
| "learning_rate": 3.768333960135389e-05, | |
| "loss": 0.0856, | |
| "step": 39300 | |
| }, | |
| { | |
| "epoch": 2.469600100288329, | |
| "grad_norm": 0.002874793019145727, | |
| "learning_rate": 3.765199949855836e-05, | |
| "loss": 0.0586, | |
| "step": 39400 | |
| }, | |
| { | |
| "epoch": 2.4758681208474362, | |
| "grad_norm": 0.00998006109148264, | |
| "learning_rate": 3.762065939576282e-05, | |
| "loss": 0.0746, | |
| "step": 39500 | |
| }, | |
| { | |
| "epoch": 2.482136141406544, | |
| "grad_norm": 0.010123957879841328, | |
| "learning_rate": 3.7589319292967285e-05, | |
| "loss": 0.0429, | |
| "step": 39600 | |
| }, | |
| { | |
| "epoch": 2.488404161965651, | |
| "grad_norm": 0.004430798348039389, | |
| "learning_rate": 3.755797919017174e-05, | |
| "loss": 0.074, | |
| "step": 39700 | |
| }, | |
| { | |
| "epoch": 2.494672182524759, | |
| "grad_norm": 0.05416274443268776, | |
| "learning_rate": 3.7526639087376206e-05, | |
| "loss": 0.0738, | |
| "step": 39800 | |
| }, | |
| { | |
| "epoch": 2.500940203083866, | |
| "grad_norm": 0.03083961270749569, | |
| "learning_rate": 3.749529898458067e-05, | |
| "loss": 0.1021, | |
| "step": 39900 | |
| }, | |
| { | |
| "epoch": 2.507208223642974, | |
| "grad_norm": 18.929054260253906, | |
| "learning_rate": 3.7463958881785134e-05, | |
| "loss": 0.0876, | |
| "step": 40000 | |
| }, | |
| { | |
| "epoch": 2.513476244202081, | |
| "grad_norm": 0.0038987034931778908, | |
| "learning_rate": 3.74326187789896e-05, | |
| "loss": 0.097, | |
| "step": 40100 | |
| }, | |
| { | |
| "epoch": 2.5197442647611883, | |
| "grad_norm": 0.6230727434158325, | |
| "learning_rate": 3.7401278676194056e-05, | |
| "loss": 0.082, | |
| "step": 40200 | |
| }, | |
| { | |
| "epoch": 2.526012285320296, | |
| "grad_norm": 0.029838476330041885, | |
| "learning_rate": 3.736993857339852e-05, | |
| "loss": 0.0909, | |
| "step": 40300 | |
| }, | |
| { | |
| "epoch": 2.5322803058794032, | |
| "grad_norm": 0.11969407647848129, | |
| "learning_rate": 3.7338598470602984e-05, | |
| "loss": 0.032, | |
| "step": 40400 | |
| }, | |
| { | |
| "epoch": 2.5385483264385105, | |
| "grad_norm": 8.733318328857422, | |
| "learning_rate": 3.730725836780745e-05, | |
| "loss": 0.05, | |
| "step": 40500 | |
| }, | |
| { | |
| "epoch": 2.544816346997618, | |
| "grad_norm": 0.004856993909925222, | |
| "learning_rate": 3.727591826501191e-05, | |
| "loss": 0.0687, | |
| "step": 40600 | |
| }, | |
| { | |
| "epoch": 2.551084367556726, | |
| "grad_norm": 0.010757598094642162, | |
| "learning_rate": 3.7244578162216376e-05, | |
| "loss": 0.0689, | |
| "step": 40700 | |
| }, | |
| { | |
| "epoch": 2.557352388115833, | |
| "grad_norm": 0.035573601722717285, | |
| "learning_rate": 3.7213238059420834e-05, | |
| "loss": 0.0571, | |
| "step": 40800 | |
| }, | |
| { | |
| "epoch": 2.5636204086749403, | |
| "grad_norm": 0.40876084566116333, | |
| "learning_rate": 3.71818979566253e-05, | |
| "loss": 0.04, | |
| "step": 40900 | |
| }, | |
| { | |
| "epoch": 2.569888429234048, | |
| "grad_norm": 5.089639186859131, | |
| "learning_rate": 3.715055785382976e-05, | |
| "loss": 0.0527, | |
| "step": 41000 | |
| }, | |
| { | |
| "epoch": 2.5761564497931553, | |
| "grad_norm": 0.06109604611992836, | |
| "learning_rate": 3.7119217751034226e-05, | |
| "loss": 0.0965, | |
| "step": 41100 | |
| }, | |
| { | |
| "epoch": 2.5824244703522625, | |
| "grad_norm": 0.1139836385846138, | |
| "learning_rate": 3.708787764823869e-05, | |
| "loss": 0.1114, | |
| "step": 41200 | |
| }, | |
| { | |
| "epoch": 2.58869249091137, | |
| "grad_norm": 0.013192636892199516, | |
| "learning_rate": 3.705653754544315e-05, | |
| "loss": 0.0919, | |
| "step": 41300 | |
| }, | |
| { | |
| "epoch": 2.5949605114704775, | |
| "grad_norm": 0.03195355087518692, | |
| "learning_rate": 3.702519744264761e-05, | |
| "loss": 0.0687, | |
| "step": 41400 | |
| }, | |
| { | |
| "epoch": 2.601228532029585, | |
| "grad_norm": 1.656760334968567, | |
| "learning_rate": 3.6993857339852076e-05, | |
| "loss": 0.1247, | |
| "step": 41500 | |
| }, | |
| { | |
| "epoch": 2.6074965525886924, | |
| "grad_norm": 0.00459561450406909, | |
| "learning_rate": 3.696251723705654e-05, | |
| "loss": 0.0656, | |
| "step": 41600 | |
| }, | |
| { | |
| "epoch": 2.6137645731478, | |
| "grad_norm": 0.11038025468587875, | |
| "learning_rate": 3.6931177134261004e-05, | |
| "loss": 0.0795, | |
| "step": 41700 | |
| }, | |
| { | |
| "epoch": 2.6200325937069073, | |
| "grad_norm": 0.1765427589416504, | |
| "learning_rate": 3.689983703146547e-05, | |
| "loss": 0.0859, | |
| "step": 41800 | |
| }, | |
| { | |
| "epoch": 2.6263006142660146, | |
| "grad_norm": 0.014666881412267685, | |
| "learning_rate": 3.6868496928669925e-05, | |
| "loss": 0.0864, | |
| "step": 41900 | |
| }, | |
| { | |
| "epoch": 2.6325686348251223, | |
| "grad_norm": 0.014499700628221035, | |
| "learning_rate": 3.683715682587439e-05, | |
| "loss": 0.0658, | |
| "step": 42000 | |
| }, | |
| { | |
| "epoch": 2.6388366553842295, | |
| "grad_norm": 0.01551514770835638, | |
| "learning_rate": 3.6805816723078854e-05, | |
| "loss": 0.0351, | |
| "step": 42100 | |
| }, | |
| { | |
| "epoch": 2.645104675943337, | |
| "grad_norm": 0.009609305299818516, | |
| "learning_rate": 3.677447662028332e-05, | |
| "loss": 0.1173, | |
| "step": 42200 | |
| }, | |
| { | |
| "epoch": 2.6513726965024444, | |
| "grad_norm": 0.019696684554219246, | |
| "learning_rate": 3.674313651748778e-05, | |
| "loss": 0.0946, | |
| "step": 42300 | |
| }, | |
| { | |
| "epoch": 2.657640717061552, | |
| "grad_norm": 0.011365796439349651, | |
| "learning_rate": 3.671179641469224e-05, | |
| "loss": 0.061, | |
| "step": 42400 | |
| }, | |
| { | |
| "epoch": 2.6639087376206594, | |
| "grad_norm": 0.0028541130013763905, | |
| "learning_rate": 3.66804563118967e-05, | |
| "loss": 0.0637, | |
| "step": 42500 | |
| }, | |
| { | |
| "epoch": 2.6701767581797666, | |
| "grad_norm": 0.08456343412399292, | |
| "learning_rate": 3.664911620910117e-05, | |
| "loss": 0.0959, | |
| "step": 42600 | |
| }, | |
| { | |
| "epoch": 2.6764447787388743, | |
| "grad_norm": 1.1794720888137817, | |
| "learning_rate": 3.661777610630563e-05, | |
| "loss": 0.0638, | |
| "step": 42700 | |
| }, | |
| { | |
| "epoch": 2.6827127992979816, | |
| "grad_norm": 0.008910051546990871, | |
| "learning_rate": 3.6586436003510095e-05, | |
| "loss": 0.1023, | |
| "step": 42800 | |
| }, | |
| { | |
| "epoch": 2.6889808198570893, | |
| "grad_norm": 0.10396095365285873, | |
| "learning_rate": 3.655509590071456e-05, | |
| "loss": 0.0964, | |
| "step": 42900 | |
| }, | |
| { | |
| "epoch": 2.6952488404161965, | |
| "grad_norm": 0.006989391054958105, | |
| "learning_rate": 3.652375579791902e-05, | |
| "loss": 0.0608, | |
| "step": 43000 | |
| }, | |
| { | |
| "epoch": 2.701516860975304, | |
| "grad_norm": 0.05438686162233353, | |
| "learning_rate": 3.649241569512348e-05, | |
| "loss": 0.0974, | |
| "step": 43100 | |
| }, | |
| { | |
| "epoch": 2.7077848815344114, | |
| "grad_norm": 0.006107168737798929, | |
| "learning_rate": 3.6461075592327945e-05, | |
| "loss": 0.093, | |
| "step": 43200 | |
| }, | |
| { | |
| "epoch": 2.7140529020935187, | |
| "grad_norm": 8.148849487304688, | |
| "learning_rate": 3.642973548953241e-05, | |
| "loss": 0.0816, | |
| "step": 43300 | |
| }, | |
| { | |
| "epoch": 2.7203209226526264, | |
| "grad_norm": 0.1799936592578888, | |
| "learning_rate": 3.639839538673687e-05, | |
| "loss": 0.0944, | |
| "step": 43400 | |
| }, | |
| { | |
| "epoch": 2.7265889432117336, | |
| "grad_norm": 0.009076522663235664, | |
| "learning_rate": 3.636705528394133e-05, | |
| "loss": 0.0843, | |
| "step": 43500 | |
| }, | |
| { | |
| "epoch": 2.7328569637708413, | |
| "grad_norm": 16.197790145874023, | |
| "learning_rate": 3.6335715181145795e-05, | |
| "loss": 0.0849, | |
| "step": 43600 | |
| }, | |
| { | |
| "epoch": 2.7391249843299486, | |
| "grad_norm": 0.012713102623820305, | |
| "learning_rate": 3.630437507835026e-05, | |
| "loss": 0.0668, | |
| "step": 43700 | |
| }, | |
| { | |
| "epoch": 2.7453930048890562, | |
| "grad_norm": 0.019433556124567986, | |
| "learning_rate": 3.627303497555472e-05, | |
| "loss": 0.0686, | |
| "step": 43800 | |
| }, | |
| { | |
| "epoch": 2.7516610254481635, | |
| "grad_norm": 5.01052188873291, | |
| "learning_rate": 3.624169487275919e-05, | |
| "loss": 0.0458, | |
| "step": 43900 | |
| }, | |
| { | |
| "epoch": 2.7579290460072707, | |
| "grad_norm": 0.02707609347999096, | |
| "learning_rate": 3.6210354769963644e-05, | |
| "loss": 0.0676, | |
| "step": 44000 | |
| }, | |
| { | |
| "epoch": 2.7641970665663784, | |
| "grad_norm": 0.006899044383317232, | |
| "learning_rate": 3.617901466716811e-05, | |
| "loss": 0.1024, | |
| "step": 44100 | |
| }, | |
| { | |
| "epoch": 2.7704650871254857, | |
| "grad_norm": 0.13500969111919403, | |
| "learning_rate": 3.614767456437257e-05, | |
| "loss": 0.0909, | |
| "step": 44200 | |
| }, | |
| { | |
| "epoch": 2.7767331076845934, | |
| "grad_norm": 0.2365235686302185, | |
| "learning_rate": 3.611633446157704e-05, | |
| "loss": 0.0627, | |
| "step": 44300 | |
| }, | |
| { | |
| "epoch": 2.7830011282437006, | |
| "grad_norm": 0.03015589341521263, | |
| "learning_rate": 3.60849943587815e-05, | |
| "loss": 0.0684, | |
| "step": 44400 | |
| }, | |
| { | |
| "epoch": 2.7892691488028083, | |
| "grad_norm": 0.06196145713329315, | |
| "learning_rate": 3.6053654255985965e-05, | |
| "loss": 0.0604, | |
| "step": 44500 | |
| }, | |
| { | |
| "epoch": 2.7955371693619155, | |
| "grad_norm": 0.12457844614982605, | |
| "learning_rate": 3.602231415319042e-05, | |
| "loss": 0.0829, | |
| "step": 44600 | |
| }, | |
| { | |
| "epoch": 2.801805189921023, | |
| "grad_norm": 0.014878248795866966, | |
| "learning_rate": 3.5990974050394886e-05, | |
| "loss": 0.0819, | |
| "step": 44700 | |
| }, | |
| { | |
| "epoch": 2.8080732104801305, | |
| "grad_norm": 3.6133410930633545, | |
| "learning_rate": 3.595963394759935e-05, | |
| "loss": 0.0685, | |
| "step": 44800 | |
| }, | |
| { | |
| "epoch": 2.8143412310392377, | |
| "grad_norm": 4.534252166748047, | |
| "learning_rate": 3.5928293844803814e-05, | |
| "loss": 0.0906, | |
| "step": 44900 | |
| }, | |
| { | |
| "epoch": 2.820609251598345, | |
| "grad_norm": 11.92259407043457, | |
| "learning_rate": 3.589695374200828e-05, | |
| "loss": 0.1371, | |
| "step": 45000 | |
| }, | |
| { | |
| "epoch": 2.8268772721574527, | |
| "grad_norm": 0.009200111962854862, | |
| "learning_rate": 3.5865613639212736e-05, | |
| "loss": 0.0614, | |
| "step": 45100 | |
| }, | |
| { | |
| "epoch": 2.8331452927165603, | |
| "grad_norm": 0.002684760605916381, | |
| "learning_rate": 3.58342735364172e-05, | |
| "loss": 0.0567, | |
| "step": 45200 | |
| }, | |
| { | |
| "epoch": 2.8394133132756676, | |
| "grad_norm": 6.500457286834717, | |
| "learning_rate": 3.5802933433621664e-05, | |
| "loss": 0.0957, | |
| "step": 45300 | |
| }, | |
| { | |
| "epoch": 2.845681333834775, | |
| "grad_norm": 0.3313663601875305, | |
| "learning_rate": 3.577159333082613e-05, | |
| "loss": 0.1325, | |
| "step": 45400 | |
| }, | |
| { | |
| "epoch": 2.8519493543938825, | |
| "grad_norm": 0.026840632781386375, | |
| "learning_rate": 3.574025322803059e-05, | |
| "loss": 0.0652, | |
| "step": 45500 | |
| }, | |
| { | |
| "epoch": 2.8582173749529898, | |
| "grad_norm": 0.09741576015949249, | |
| "learning_rate": 3.5708913125235056e-05, | |
| "loss": 0.0825, | |
| "step": 45600 | |
| }, | |
| { | |
| "epoch": 2.864485395512097, | |
| "grad_norm": 0.31416431069374084, | |
| "learning_rate": 3.5677573022439514e-05, | |
| "loss": 0.0796, | |
| "step": 45700 | |
| }, | |
| { | |
| "epoch": 2.8707534160712047, | |
| "grad_norm": 0.1350662261247635, | |
| "learning_rate": 3.564623291964398e-05, | |
| "loss": 0.0711, | |
| "step": 45800 | |
| }, | |
| { | |
| "epoch": 2.8770214366303124, | |
| "grad_norm": 0.02628297358751297, | |
| "learning_rate": 3.561489281684844e-05, | |
| "loss": 0.0867, | |
| "step": 45900 | |
| }, | |
| { | |
| "epoch": 2.8832894571894196, | |
| "grad_norm": 0.004932702984660864, | |
| "learning_rate": 3.5583552714052906e-05, | |
| "loss": 0.0809, | |
| "step": 46000 | |
| }, | |
| { | |
| "epoch": 2.889557477748527, | |
| "grad_norm": 0.09680982679128647, | |
| "learning_rate": 3.555221261125737e-05, | |
| "loss": 0.092, | |
| "step": 46100 | |
| }, | |
| { | |
| "epoch": 2.8958254983076346, | |
| "grad_norm": 0.060553718358278275, | |
| "learning_rate": 3.552087250846183e-05, | |
| "loss": 0.1095, | |
| "step": 46200 | |
| }, | |
| { | |
| "epoch": 2.902093518866742, | |
| "grad_norm": 0.011483977548778057, | |
| "learning_rate": 3.548953240566629e-05, | |
| "loss": 0.0465, | |
| "step": 46300 | |
| }, | |
| { | |
| "epoch": 2.908361539425849, | |
| "grad_norm": 0.073598712682724, | |
| "learning_rate": 3.5458192302870756e-05, | |
| "loss": 0.0714, | |
| "step": 46400 | |
| }, | |
| { | |
| "epoch": 2.9146295599849568, | |
| "grad_norm": 0.029353119432926178, | |
| "learning_rate": 3.542685220007522e-05, | |
| "loss": 0.0627, | |
| "step": 46500 | |
| }, | |
| { | |
| "epoch": 2.9208975805440645, | |
| "grad_norm": 0.006514715496450663, | |
| "learning_rate": 3.5395512097279684e-05, | |
| "loss": 0.0898, | |
| "step": 46600 | |
| }, | |
| { | |
| "epoch": 2.9271656011031717, | |
| "grad_norm": 0.031139971688389778, | |
| "learning_rate": 3.536417199448414e-05, | |
| "loss": 0.0594, | |
| "step": 46700 | |
| }, | |
| { | |
| "epoch": 2.933433621662279, | |
| "grad_norm": 0.01661308854818344, | |
| "learning_rate": 3.5332831891688605e-05, | |
| "loss": 0.1293, | |
| "step": 46800 | |
| }, | |
| { | |
| "epoch": 2.9397016422213866, | |
| "grad_norm": 0.08889225125312805, | |
| "learning_rate": 3.530149178889307e-05, | |
| "loss": 0.0453, | |
| "step": 46900 | |
| }, | |
| { | |
| "epoch": 2.945969662780494, | |
| "grad_norm": 0.0317188985645771, | |
| "learning_rate": 3.527015168609753e-05, | |
| "loss": 0.0557, | |
| "step": 47000 | |
| }, | |
| { | |
| "epoch": 2.952237683339601, | |
| "grad_norm": 0.025686800479888916, | |
| "learning_rate": 3.5238811583302e-05, | |
| "loss": 0.0944, | |
| "step": 47100 | |
| }, | |
| { | |
| "epoch": 2.958505703898709, | |
| "grad_norm": 0.020531490445137024, | |
| "learning_rate": 3.520747148050646e-05, | |
| "loss": 0.0578, | |
| "step": 47200 | |
| }, | |
| { | |
| "epoch": 2.964773724457816, | |
| "grad_norm": 0.8992320895195007, | |
| "learning_rate": 3.517613137771092e-05, | |
| "loss": 0.0752, | |
| "step": 47300 | |
| }, | |
| { | |
| "epoch": 2.9710417450169238, | |
| "grad_norm": 0.14482086896896362, | |
| "learning_rate": 3.514479127491538e-05, | |
| "loss": 0.1128, | |
| "step": 47400 | |
| }, | |
| { | |
| "epoch": 2.977309765576031, | |
| "grad_norm": 0.03547542169690132, | |
| "learning_rate": 3.511345117211985e-05, | |
| "loss": 0.0353, | |
| "step": 47500 | |
| }, | |
| { | |
| "epoch": 2.9835777861351387, | |
| "grad_norm": 1.4999819993972778, | |
| "learning_rate": 3.508211106932431e-05, | |
| "loss": 0.0638, | |
| "step": 47600 | |
| }, | |
| { | |
| "epoch": 2.989845806694246, | |
| "grad_norm": 0.5380005240440369, | |
| "learning_rate": 3.5050770966528775e-05, | |
| "loss": 0.0846, | |
| "step": 47700 | |
| }, | |
| { | |
| "epoch": 2.996113827253353, | |
| "grad_norm": 0.09019165486097336, | |
| "learning_rate": 3.501943086373323e-05, | |
| "loss": 0.141, | |
| "step": 47800 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_f1": 0.9587811491105839, | |
| "eval_loss": 0.19986020028591156, | |
| "eval_runtime": 46.4942, | |
| "eval_samples_per_second": 305.006, | |
| "eval_steps_per_second": 38.134, | |
| "step": 47862 | |
| } | |
| ], | |
| "logging_steps": 100, | |
| "max_steps": 159540, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.9570980759088435e+17, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
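
The file above is a standard Hugging Face `Trainer` state record: `log_history` interleaves training logs (entries with `loss`, `grad_norm`, `learning_rate`) and per-epoch evaluation logs (entries with `eval_f1`, `eval_loss`). As a minimal sketch of how one might inspect it, the snippet below loads the state and summarizes the evaluation rows; the local path `trainer_state.json` is an assumption for illustration, while the field names are exactly those in the record above.

```python
import json

# Load the Trainer state shown above ("trainer_state.json" is an assumed local path).
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes training logs (which carry "loss") and per-epoch
# evaluation logs (which carry "eval_f1"), as in the records above.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_f1" in e]

for e in eval_logs:
    print(f"epoch {e['epoch']:.1f}  step {e['step']:>6}  eval_f1 {e['eval_f1']:.4f}")

# Best checkpoint according to the tracked metric (matches the header fields).
print("best:", state["best_metric"], "at", state["best_model_checkpoint"])
```

Run against this state, it prints the two evaluation rows visible above (F1 ≈ 0.9594 at step 31908 and ≈ 0.9588 at step 47862) and confirms that the best checkpoint is `checkpoint-31908` from epoch 2, even though `num_train_epochs` is 10 and `max_steps` is 159540, i.e. the record covers training up to epoch 3 with the tracked metric peaking one epoch earlier.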