|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9947770188830853,
  "eval_steps": 500,
  "global_step": 1242,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0024106066693451184, "grad_norm": 6.773519515991211, "learning_rate": 8e-08, "loss": 1.239, "step": 1 },
    { "epoch": 0.004821213338690237, "grad_norm": 6.920924186706543, "learning_rate": 1.6e-07, "loss": 1.2456, "step": 2 },
    { "epoch": 0.007231820008035355, "grad_norm": 6.668229579925537, "learning_rate": 2.4000000000000003e-07, "loss": 1.2141, "step": 3 },
    { "epoch": 0.009642426677380474, "grad_norm": 6.902156829833984, "learning_rate": 3.2e-07, "loss": 1.256, "step": 4 },
    { "epoch": 0.012053033346725592, "grad_norm": 6.876142501831055, "learning_rate": 4.0000000000000003e-07, "loss": 1.2453, "step": 5 },
    { "epoch": 0.01446364001607071, "grad_norm": 6.706584930419922, "learning_rate": 4.800000000000001e-07, "loss": 1.2398, "step": 6 },
    { "epoch": 0.01687424668541583, "grad_norm": 6.669106960296631, "learning_rate": 5.6e-07, "loss": 1.245, "step": 7 },
    { "epoch": 0.019284853354760947, "grad_norm": 6.519770622253418, "learning_rate": 6.4e-07, "loss": 1.2305, "step": 8 },
    { "epoch": 0.021695460024106068, "grad_norm": 6.1270647048950195, "learning_rate": 7.2e-07, "loss": 1.2086, "step": 9 },
    { "epoch": 0.024106066693451184, "grad_norm": 6.2503509521484375, "learning_rate": 8.000000000000001e-07, "loss": 1.2539, "step": 10 },
    { "epoch": 0.026516673362796304, "grad_norm": 6.232725620269775, "learning_rate": 8.8e-07, "loss": 1.2319, "step": 11 },
    { "epoch": 0.02892728003214142, "grad_norm": 5.046456336975098, "learning_rate": 9.600000000000001e-07, "loss": 1.2281, "step": 12 },
    { "epoch": 0.03133788670148654, "grad_norm": 4.612127780914307, "learning_rate": 1.04e-06, "loss": 1.1698, "step": 13 },
    { "epoch": 0.03374849337083166, "grad_norm": 4.423837661743164, "learning_rate": 1.12e-06, "loss": 1.1725, "step": 14 },
    { "epoch": 0.03615910004017678, "grad_norm": 4.467825412750244, "learning_rate": 1.2000000000000002e-06, "loss": 1.1782, "step": 15 },
    { "epoch": 0.038569706709521895, "grad_norm": 2.7753546237945557, "learning_rate": 1.28e-06, "loss": 1.1366, "step": 16 },
    { "epoch": 0.04098031337886701, "grad_norm": 2.8587357997894287, "learning_rate": 1.3600000000000001e-06, "loss": 1.1537, "step": 17 },
    { "epoch": 0.043390920048212135, "grad_norm": 2.703477621078491, "learning_rate": 1.44e-06, "loss": 1.1033, "step": 18 },
    { "epoch": 0.04580152671755725, "grad_norm": 2.3742921352386475, "learning_rate": 1.52e-06, "loss": 1.1578, "step": 19 },
    { "epoch": 0.04821213338690237, "grad_norm": 2.467975378036499, "learning_rate": 1.6000000000000001e-06, "loss": 1.1165, "step": 20 },
    { "epoch": 0.05062274005624749, "grad_norm": 2.0225586891174316, "learning_rate": 1.6800000000000002e-06, "loss": 1.1315, "step": 21 },
    { "epoch": 0.05303334672559261, "grad_norm": 2.7744572162628174, "learning_rate": 1.76e-06, "loss": 1.0772, "step": 22 },
    { "epoch": 0.055443953394937726, "grad_norm": 3.2976622581481934, "learning_rate": 1.8400000000000002e-06, "loss": 1.04, "step": 23 },
    { "epoch": 0.05785456006428284, "grad_norm": 3.5405001640319824, "learning_rate": 1.9200000000000003e-06, "loss": 1.1221, "step": 24 },
    { "epoch": 0.060265166733627966, "grad_norm": 3.3569343090057373, "learning_rate": 2.0000000000000003e-06, "loss": 1.109, "step": 25 },
    { "epoch": 0.06267577340297308, "grad_norm": 2.680312395095825, "learning_rate": 2.08e-06, "loss": 1.0854, "step": 26 },
    { "epoch": 0.0650863800723182, "grad_norm": 2.1931915283203125, "learning_rate": 2.16e-06, "loss": 1.0211, "step": 27 },
    { "epoch": 0.06749698674166332, "grad_norm": 1.8890901803970337, "learning_rate": 2.24e-06, "loss": 1.0191, "step": 28 },
    { "epoch": 0.06990759341100844, "grad_norm": 1.484052062034607, "learning_rate": 2.3200000000000002e-06, "loss": 1.0685, "step": 29 },
    { "epoch": 0.07231820008035356, "grad_norm": 1.6618725061416626, "learning_rate": 2.4000000000000003e-06, "loss": 1.0696, "step": 30 },
    { "epoch": 0.07472880674969867, "grad_norm": 1.8293867111206055, "learning_rate": 2.4800000000000004e-06, "loss": 1.0431, "step": 31 },
    { "epoch": 0.07713941341904379, "grad_norm": 1.7599067687988281, "learning_rate": 2.56e-06, "loss": 1.022, "step": 32 },
    { "epoch": 0.0795500200883889, "grad_norm": 1.6270533800125122, "learning_rate": 2.64e-06, "loss": 1.0117, "step": 33 },
    { "epoch": 0.08196062675773402, "grad_norm": 1.256848931312561, "learning_rate": 2.7200000000000002e-06, "loss": 0.9599, "step": 34 },
    { "epoch": 0.08437123342707915, "grad_norm": 1.0509469509124756, "learning_rate": 2.8000000000000003e-06, "loss": 0.958, "step": 35 },
    { "epoch": 0.08678184009642427, "grad_norm": 1.0251898765563965, "learning_rate": 2.88e-06, "loss": 1.0048, "step": 36 },
    { "epoch": 0.08919244676576939, "grad_norm": 1.0901225805282593, "learning_rate": 2.96e-06, "loss": 0.9968, "step": 37 },
    { "epoch": 0.0916030534351145, "grad_norm": 1.0281929969787598, "learning_rate": 3.04e-06, "loss": 0.9496, "step": 38 },
    { "epoch": 0.09401366010445962, "grad_norm": 1.0354727506637573, "learning_rate": 3.12e-06, "loss": 0.9807, "step": 39 },
    { "epoch": 0.09642426677380474, "grad_norm": 0.8969101309776306, "learning_rate": 3.2000000000000003e-06, "loss": 0.963, "step": 40 },
    { "epoch": 0.09883487344314985, "grad_norm": 0.8526788949966431, "learning_rate": 3.2800000000000004e-06, "loss": 0.9729, "step": 41 },
    { "epoch": 0.10124548011249498, "grad_norm": 0.834715723991394, "learning_rate": 3.3600000000000004e-06, "loss": 0.9709, "step": 42 },
    { "epoch": 0.1036560867818401, "grad_norm": 0.7841347455978394, "learning_rate": 3.44e-06, "loss": 0.9866, "step": 43 },
    { "epoch": 0.10606669345118522, "grad_norm": 0.7787656784057617, "learning_rate": 3.52e-06, "loss": 0.9598, "step": 44 },
    { "epoch": 0.10847730012053033, "grad_norm": 0.8190361857414246, "learning_rate": 3.6000000000000003e-06, "loss": 0.9298, "step": 45 },
    { "epoch": 0.11088790678987545, "grad_norm": 0.7821114659309387, "learning_rate": 3.6800000000000003e-06, "loss": 0.9354, "step": 46 },
    { "epoch": 0.11329851345922057, "grad_norm": 0.7987552881240845, "learning_rate": 3.7600000000000004e-06, "loss": 0.9752, "step": 47 },
    { "epoch": 0.11570912012856568, "grad_norm": 0.7736258506774902, "learning_rate": 3.8400000000000005e-06, "loss": 0.9756, "step": 48 },
    { "epoch": 0.1181197267979108, "grad_norm": 0.7108762264251709, "learning_rate": 3.920000000000001e-06, "loss": 0.9073, "step": 49 },
    { "epoch": 0.12053033346725593, "grad_norm": 0.8190523982048035, "learning_rate": 4.000000000000001e-06, "loss": 0.948, "step": 50 },
    { "epoch": 0.12294094013660105, "grad_norm": 0.8935843110084534, "learning_rate": 4.08e-06, "loss": 0.9452, "step": 51 },
    { "epoch": 0.12535154680594615, "grad_norm": 0.7128339409828186, "learning_rate": 4.16e-06, "loss": 0.9387, "step": 52 },
    { "epoch": 0.12776215347529127, "grad_norm": 0.8199414014816284, "learning_rate": 4.24e-06, "loss": 0.9474, "step": 53 },
    { "epoch": 0.1301727601446364, "grad_norm": 0.7801357507705688, "learning_rate": 4.32e-06, "loss": 0.8844, "step": 54 },
    { "epoch": 0.13258336681398153, "grad_norm": 0.7618433833122253, "learning_rate": 4.4e-06, "loss": 0.8656, "step": 55 },
    { "epoch": 0.13499397348332665, "grad_norm": 0.6135762929916382, "learning_rate": 4.48e-06, "loss": 0.9193, "step": 56 },
    { "epoch": 0.13740458015267176, "grad_norm": 0.6238518953323364, "learning_rate": 4.56e-06, "loss": 0.8669, "step": 57 },
    { "epoch": 0.13981518682201688, "grad_norm": 0.720568060874939, "learning_rate": 4.6400000000000005e-06, "loss": 0.9042, "step": 58 },
    { "epoch": 0.142225793491362, "grad_norm": 0.689953088760376, "learning_rate": 4.7200000000000005e-06, "loss": 0.9123, "step": 59 },
    { "epoch": 0.1446364001607071, "grad_norm": 0.6999253630638123, "learning_rate": 4.800000000000001e-06, "loss": 0.9165, "step": 60 },
    { "epoch": 0.14704700683005223, "grad_norm": 0.709952175617218, "learning_rate": 4.880000000000001e-06, "loss": 0.9101, "step": 61 },
    { "epoch": 0.14945761349939735, "grad_norm": 0.5979318618774414, "learning_rate": 4.960000000000001e-06, "loss": 0.9097, "step": 62 },
    { "epoch": 0.15186822016874246, "grad_norm": 0.6467641592025757, "learning_rate": 5.04e-06, "loss": 0.9056, "step": 63 },
    { "epoch": 0.15427882683808758, "grad_norm": 0.648495078086853, "learning_rate": 5.12e-06, "loss": 0.9198, "step": 64 },
    { "epoch": 0.1566894335074327, "grad_norm": 0.5750287175178528, "learning_rate": 5.2e-06, "loss": 0.9235, "step": 65 },
    { "epoch": 0.1591000401767778, "grad_norm": 0.5516173243522644, "learning_rate": 5.28e-06, "loss": 0.88, "step": 66 },
    { "epoch": 0.16151064684612293, "grad_norm": 0.6047608256340027, "learning_rate": 5.36e-06, "loss": 0.8667, "step": 67 },
    { "epoch": 0.16392125351546805, "grad_norm": 0.5874326229095459, "learning_rate": 5.4400000000000004e-06, "loss": 0.9056, "step": 68 },
    { "epoch": 0.1663318601848132, "grad_norm": 0.6885890960693359, "learning_rate": 5.5200000000000005e-06, "loss": 0.8944, "step": 69 },
    { "epoch": 0.1687424668541583, "grad_norm": 0.624688446521759, "learning_rate": 5.600000000000001e-06, "loss": 0.9077, "step": 70 },
    { "epoch": 0.17115307352350342, "grad_norm": 0.6586514115333557, "learning_rate": 5.68e-06, "loss": 0.923, "step": 71 },
    { "epoch": 0.17356368019284854, "grad_norm": 0.7220573425292969, "learning_rate": 5.76e-06, "loss": 0.8937, "step": 72 },
    { "epoch": 0.17597428686219366, "grad_norm": 0.5960977673530579, "learning_rate": 5.84e-06, "loss": 0.8584, "step": 73 },
    { "epoch": 0.17838489353153877, "grad_norm": 0.7161673903465271, "learning_rate": 5.92e-06, "loss": 0.9043, "step": 74 },
    { "epoch": 0.1807955002008839, "grad_norm": 0.6262743473052979, "learning_rate": 6e-06, "loss": 0.8694, "step": 75 },
    { "epoch": 0.183206106870229, "grad_norm": 0.6736229658126831, "learning_rate": 6.08e-06, "loss": 0.9061, "step": 76 },
    { "epoch": 0.18561671353957412, "grad_norm": 0.6308712363243103, "learning_rate": 6.16e-06, "loss": 0.8839, "step": 77 },
    { "epoch": 0.18802732020891924, "grad_norm": 0.5172812342643738, "learning_rate": 6.24e-06, "loss": 0.8883, "step": 78 },
    { "epoch": 0.19043792687826436, "grad_norm": 0.7116965055465698, "learning_rate": 6.3200000000000005e-06, "loss": 0.8948, "step": 79 },
    { "epoch": 0.19284853354760947, "grad_norm": 0.8339863419532776, "learning_rate": 6.4000000000000006e-06, "loss": 0.8787, "step": 80 },
    { "epoch": 0.1952591402169546, "grad_norm": 0.6410143375396729, "learning_rate": 6.480000000000001e-06, "loss": 0.8969, "step": 81 },
    { "epoch": 0.1976697468862997, "grad_norm": 0.6388810276985168, "learning_rate": 6.560000000000001e-06, "loss": 0.8527, "step": 82 },
    { "epoch": 0.20008035355564482, "grad_norm": 0.7475878596305847, "learning_rate": 6.640000000000001e-06, "loss": 0.8816, "step": 83 },
    { "epoch": 0.20249096022498997, "grad_norm": 0.626528263092041, "learning_rate": 6.720000000000001e-06, "loss": 0.8909, "step": 84 },
    { "epoch": 0.20490156689433509, "grad_norm": 0.6144326329231262, "learning_rate": 6.800000000000001e-06, "loss": 0.8917, "step": 85 },
    { "epoch": 0.2073121735636802, "grad_norm": 0.551555871963501, "learning_rate": 6.88e-06, "loss": 0.8847, "step": 86 },
    { "epoch": 0.20972278023302532, "grad_norm": 0.7386748194694519, "learning_rate": 6.96e-06, "loss": 0.8986, "step": 87 },
    { "epoch": 0.21213338690237044, "grad_norm": 0.690913200378418, "learning_rate": 7.04e-06, "loss": 0.8742, "step": 88 },
    { "epoch": 0.21454399357171555, "grad_norm": 0.5800572037696838, "learning_rate": 7.1200000000000004e-06, "loss": 0.879, "step": 89 },
    { "epoch": 0.21695460024106067, "grad_norm": 0.5930675268173218, "learning_rate": 7.2000000000000005e-06, "loss": 0.9004, "step": 90 },
    { "epoch": 0.21936520691040579, "grad_norm": 0.6192178130149841, "learning_rate": 7.280000000000001e-06, "loss": 0.8711, "step": 91 },
    { "epoch": 0.2217758135797509, "grad_norm": 0.6932560205459595, "learning_rate": 7.360000000000001e-06, "loss": 0.8873, "step": 92 },
    { "epoch": 0.22418642024909602, "grad_norm": 0.55876624584198, "learning_rate": 7.440000000000001e-06, "loss": 0.8931, "step": 93 },
    { "epoch": 0.22659702691844114, "grad_norm": 0.7649154663085938, "learning_rate": 7.520000000000001e-06, "loss": 0.8767, "step": 94 },
    { "epoch": 0.22900763358778625, "grad_norm": 0.6134861707687378, "learning_rate": 7.600000000000001e-06, "loss": 0.8998, "step": 95 },
    { "epoch": 0.23141824025713137, "grad_norm": 0.7478952407836914, "learning_rate": 7.680000000000001e-06, "loss": 0.8762, "step": 96 },
    { "epoch": 0.23382884692647649, "grad_norm": 0.6455903649330139, "learning_rate": 7.76e-06, "loss": 0.8494, "step": 97 },
    { "epoch": 0.2362394535958216, "grad_norm": 0.6212043762207031, "learning_rate": 7.840000000000001e-06, "loss": 0.904, "step": 98 },
    { "epoch": 0.23865006026516675, "grad_norm": 0.6533855199813843, "learning_rate": 7.92e-06, "loss": 0.8989, "step": 99 },
    { "epoch": 0.24106066693451186, "grad_norm": 0.7439046502113342, "learning_rate": 8.000000000000001e-06, "loss": 0.9157, "step": 100 },
    { "epoch": 0.24347127360385698, "grad_norm": 0.6541479825973511, "learning_rate": 8.08e-06, "loss": 0.8937, "step": 101 },
    { "epoch": 0.2458818802732021, "grad_norm": 0.5813032388687134, "learning_rate": 8.16e-06, "loss": 0.8428, "step": 102 },
    { "epoch": 0.2482924869425472, "grad_norm": 0.8269535303115845, "learning_rate": 8.24e-06, "loss": 0.8666, "step": 103 },
    { "epoch": 0.2507030936118923, "grad_norm": 0.6725351214408875, "learning_rate": 8.32e-06, "loss": 0.8399, "step": 104 },
    { "epoch": 0.2531137002812374, "grad_norm": 0.5954082608222961, "learning_rate": 8.400000000000001e-06, "loss": 0.8907, "step": 105 },
    { "epoch": 0.25552430695058254, "grad_norm": 0.6678988337516785, "learning_rate": 8.48e-06, "loss": 0.8923, "step": 106 },
    { "epoch": 0.2579349136199277, "grad_norm": 0.6849507689476013, "learning_rate": 8.560000000000001e-06, "loss": 0.8495, "step": 107 },
    { "epoch": 0.2603455202892728, "grad_norm": 0.6548796892166138, "learning_rate": 8.64e-06, "loss": 0.8477, "step": 108 },
    { "epoch": 0.26275612695861794, "grad_norm": 0.6808841228485107, "learning_rate": 8.720000000000001e-06, "loss": 0.8596, "step": 109 },
    { "epoch": 0.26516673362796306, "grad_norm": 0.6582109928131104, "learning_rate": 8.8e-06, "loss": 0.8719, "step": 110 },
    { "epoch": 0.2675773402973082, "grad_norm": 0.6607386469841003, "learning_rate": 8.880000000000001e-06, "loss": 0.8577, "step": 111 },
    { "epoch": 0.2699879469666533, "grad_norm": 0.7141919732093811, "learning_rate": 8.96e-06, "loss": 0.8602, "step": 112 },
    { "epoch": 0.2723985536359984, "grad_norm": 0.6431668996810913, "learning_rate": 9.040000000000002e-06, "loss": 0.8821, "step": 113 },
    { "epoch": 0.2748091603053435, "grad_norm": 0.6762509346008301, "learning_rate": 9.12e-06, "loss": 0.888, "step": 114 },
    { "epoch": 0.27721976697468864, "grad_norm": 0.5908951759338379, "learning_rate": 9.200000000000002e-06, "loss": 0.8642, "step": 115 },
    { "epoch": 0.27963037364403376, "grad_norm": 0.7063172459602356, "learning_rate": 9.280000000000001e-06, "loss": 0.8634, "step": 116 },
    { "epoch": 0.2820409803133789, "grad_norm": 0.6021769642829895, "learning_rate": 9.360000000000002e-06, "loss": 0.8651, "step": 117 },
    { "epoch": 0.284451586982724, "grad_norm": 0.6814525127410889, "learning_rate": 9.440000000000001e-06, "loss": 0.8856, "step": 118 },
    { "epoch": 0.2868621936520691, "grad_norm": 0.8552815914154053, "learning_rate": 9.52e-06, "loss": 0.8649, "step": 119 },
    { "epoch": 0.2892728003214142, "grad_norm": 0.7426092624664307, "learning_rate": 9.600000000000001e-06, "loss": 0.8983, "step": 120 },
    { "epoch": 0.29168340699075934, "grad_norm": 0.785123884677887, "learning_rate": 9.68e-06, "loss": 0.9044, "step": 121 },
    { "epoch": 0.29409401366010446, "grad_norm": 0.6917296051979065, "learning_rate": 9.760000000000001e-06, "loss": 0.8708, "step": 122 },
    { "epoch": 0.2965046203294496, "grad_norm": 0.7814741134643555, "learning_rate": 9.84e-06, "loss": 0.8223, "step": 123 },
    { "epoch": 0.2989152269987947, "grad_norm": 0.6025778651237488, "learning_rate": 9.920000000000002e-06, "loss": 0.8356, "step": 124 },
    { "epoch": 0.3013258336681398, "grad_norm": 0.8133845925331116, "learning_rate": 1e-05, "loss": 0.8625, "step": 125 },
    { "epoch": 0.3037364403374849, "grad_norm": 0.673011064529419, "learning_rate": 9.999980224242792e-06, "loss": 0.8497, "step": 126 },
    { "epoch": 0.30614704700683004, "grad_norm": 0.684025228023529, "learning_rate": 9.999920897127592e-06, "loss": 0.8637, "step": 127 },
    { "epoch": 0.30855765367617516, "grad_norm": 0.6771091818809509, "learning_rate": 9.999822019123702e-06, "loss": 0.8525, "step": 128 },
    { "epoch": 0.3109682603455203, "grad_norm": 0.6857827305793762, "learning_rate": 9.999683591013273e-06, "loss": 0.8556, "step": 129 },
    { "epoch": 0.3133788670148654, "grad_norm": 0.8058888912200928, "learning_rate": 9.999505613891315e-06, "loss": 0.884, "step": 130 },
    { "epoch": 0.3157894736842105, "grad_norm": 0.8600525856018066, "learning_rate": 9.99928808916568e-06, "loss": 0.8599, "step": 131 },
    { "epoch": 0.3182000803535556, "grad_norm": 0.744256317615509, "learning_rate": 9.999031018557055e-06, "loss": 0.8926, "step": 132 },
    { "epoch": 0.32061068702290074, "grad_norm": 0.836509644985199, "learning_rate": 9.998734404098946e-06, "loss": 0.8937, "step": 133 },
    { "epoch": 0.32302129369224586, "grad_norm": 0.8581554293632507, "learning_rate": 9.998398248137665e-06, "loss": 0.8964, "step": 134 },
    { "epoch": 0.325431900361591, "grad_norm": 0.8593453764915466, "learning_rate": 9.998022553332304e-06, "loss": 0.8905, "step": 135 },
    { "epoch": 0.3278425070309361, "grad_norm": 0.9017872214317322, "learning_rate": 9.997607322654728e-06, "loss": 0.9049, "step": 136 },
    { "epoch": 0.33025311370028126, "grad_norm": 1.098368763923645, "learning_rate": 9.997152559389532e-06, "loss": 0.8501, "step": 137 },
    { "epoch": 0.3326637203696264, "grad_norm": 0.7333061099052429, "learning_rate": 9.996658267134033e-06, "loss": 0.8718, "step": 138 },
    { "epoch": 0.3350743270389715, "grad_norm": 0.929083526134491, "learning_rate": 9.996124449798234e-06, "loss": 0.8985, "step": 139 },
    { "epoch": 0.3374849337083166, "grad_norm": 0.8057188987731934, "learning_rate": 9.995551111604789e-06, "loss": 0.8956, "step": 140 },
    { "epoch": 0.33989554037766173, "grad_norm": 0.6737269163131714, "learning_rate": 9.994938257088979e-06, "loss": 0.8837, "step": 141 },
    { "epoch": 0.34230614704700685, "grad_norm": 0.7001059055328369, "learning_rate": 9.99428589109867e-06, "loss": 0.8856, "step": 142 },
    { "epoch": 0.34471675371635196, "grad_norm": 0.7277160286903381, "learning_rate": 9.993594018794271e-06, "loss": 0.8651, "step": 143 },
    { "epoch": 0.3471273603856971, "grad_norm": 0.6378852725028992, "learning_rate": 9.992862645648702e-06, "loss": 0.8599, "step": 144 },
    { "epoch": 0.3495379670550422, "grad_norm": 0.756850004196167, "learning_rate": 9.992091777447348e-06, "loss": 0.8509, "step": 145 },
    { "epoch": 0.3519485737243873, "grad_norm": 0.6996802687644958, "learning_rate": 9.991281420288011e-06, "loss": 0.8632, "step": 146 },
    { "epoch": 0.35435918039373243, "grad_norm": 0.6778576374053955, "learning_rate": 9.99043158058086e-06, "loss": 0.9116, "step": 147 },
    { "epoch": 0.35676978706307755, "grad_norm": 0.8207915425300598, "learning_rate": 9.989542265048383e-06, "loss": 0.8752, "step": 148 },
    { "epoch": 0.35918039373242266, "grad_norm": 0.6588841676712036, "learning_rate": 9.988613480725336e-06, "loss": 0.8923, "step": 149 },
    { "epoch": 0.3615910004017678, "grad_norm": 0.7442706227302551, "learning_rate": 9.987645234958687e-06, "loss": 0.9068, "step": 150 },
    { "epoch": 0.3640016070711129, "grad_norm": 0.8011964559555054, "learning_rate": 9.986637535407549e-06, "loss": 0.8491, "step": 151 },
    { "epoch": 0.366412213740458, "grad_norm": 0.6727583408355713, "learning_rate": 9.985590390043133e-06, "loss": 0.855, "step": 152 },
    { "epoch": 0.36882282040980313, "grad_norm": 0.6590178608894348, "learning_rate": 9.984503807148677e-06, "loss": 0.8439, "step": 153 },
    { "epoch": 0.37123342707914825, "grad_norm": 0.667187511920929, "learning_rate": 9.98337779531938e-06, "loss": 0.8649, "step": 154 },
    { "epoch": 0.37364403374849336, "grad_norm": 0.7421004176139832, "learning_rate": 9.982212363462336e-06, "loss": 0.8597, "step": 155 },
    { "epoch": 0.3760546404178385, "grad_norm": 0.689328134059906, "learning_rate": 9.981007520796463e-06, "loss": 0.8798, "step": 156 },
    { "epoch": 0.3784652470871836, "grad_norm": 0.7125187516212463, "learning_rate": 9.979763276852435e-06, "loss": 0.8953, "step": 157 },
    { "epoch": 0.3808758537565287, "grad_norm": 0.6684549450874329, "learning_rate": 9.978479641472593e-06, "loss": 0.8499, "step": 158 },
    { "epoch": 0.38328646042587383, "grad_norm": 0.7804553508758545, "learning_rate": 9.977156624810887e-06, "loss": 0.8555, "step": 159 },
    { "epoch": 0.38569706709521895, "grad_norm": 0.7177390456199646, "learning_rate": 9.975794237332776e-06, "loss": 0.8284, "step": 160 },
    { "epoch": 0.38810767376456407, "grad_norm": 0.6415156722068787, "learning_rate": 9.97439248981516e-06, "loss": 0.8436, "step": 161 },
    { "epoch": 0.3905182804339092, "grad_norm": 0.6989284157752991, "learning_rate": 9.972951393346285e-06, "loss": 0.8369, "step": 162 },
    { "epoch": 0.3929288871032543, "grad_norm": 0.6734793782234192, "learning_rate": 9.97147095932566e-06, "loss": 0.8379, "step": 163 },
    { "epoch": 0.3953394937725994, "grad_norm": 0.6882381439208984, "learning_rate": 9.969951199463969e-06, "loss": 0.864, "step": 164 },
    { "epoch": 0.39775010044194453, "grad_norm": 0.6084800958633423, "learning_rate": 9.96839212578297e-06, "loss": 0.8671, "step": 165 },
    { "epoch": 0.40016070711128965, "grad_norm": 0.639119565486908, "learning_rate": 9.96679375061541e-06, "loss": 0.8115, "step": 166 },
    { "epoch": 0.4025713137806348, "grad_norm": 0.6982107758522034, "learning_rate": 9.96515608660492e-06, "loss": 0.876, "step": 167 },
    { "epoch": 0.40498192044997994, "grad_norm": 0.6693940758705139, "learning_rate": 9.963479146705916e-06, "loss": 0.8405, "step": 168 },
    { "epoch": 0.40739252711932505, "grad_norm": 0.6644041538238525, "learning_rate": 9.961762944183506e-06, "loss": 0.8867, "step": 169 },
    { "epoch": 0.40980313378867017, "grad_norm": 0.5994226932525635, "learning_rate": 9.960007492613368e-06, "loss": 0.8227, "step": 170 },
    { "epoch": 0.4122137404580153, "grad_norm": 0.6915552616119385, "learning_rate": 9.958212805881656e-06, "loss": 0.81, "step": 171 },
    { "epoch": 0.4146243471273604, "grad_norm": 0.6233800053596497, "learning_rate": 9.956378898184884e-06, "loss": 0.842, "step": 172 },
    { "epoch": 0.4170349537967055, "grad_norm": 0.5291222333908081, "learning_rate": 9.95450578402982e-06, "loss": 0.8306, "step": 173 },
    { "epoch": 0.41944556046605064, "grad_norm": 0.6305242776870728, "learning_rate": 9.952593478233363e-06, "loss": 0.8728, "step": 174 },
    { "epoch": 0.42185616713539575, "grad_norm": 1.1465144157409668, "learning_rate": 9.950641995922432e-06, "loss": 0.8192, "step": 175 },
    { "epoch": 0.42426677380474087, "grad_norm": 0.7458274364471436, "learning_rate": 9.948651352533843e-06, "loss": 0.8491, "step": 176 },
    { "epoch": 0.426677380474086, "grad_norm": 0.6428912878036499, "learning_rate": 9.946621563814188e-06, "loss": 0.859, "step": 177 },
    { "epoch": 0.4290879871434311, "grad_norm": 0.7144963145256042, "learning_rate": 9.94455264581971e-06, "loss": 0.8452, "step": 178 },
    { "epoch": 0.4314985938127762, "grad_norm": 0.7666320204734802, "learning_rate": 9.942444614916177e-06, "loss": 0.8686, "step": 179 },
    { "epoch": 0.43390920048212134, "grad_norm": 0.6040058732032776, "learning_rate": 9.940297487778753e-06, "loss": 0.8794, "step": 180 },
    { "epoch": 0.43631980715146645, "grad_norm": 0.7141439914703369, "learning_rate": 9.938111281391862e-06, "loss": 0.85, "step": 181 },
    { "epoch": 0.43873041382081157, "grad_norm": 0.6317766308784485, "learning_rate": 9.93588601304906e-06, "loss": 0.8832, "step": 182 },
    { "epoch": 0.4411410204901567, "grad_norm": 0.7072132229804993, "learning_rate": 9.933621700352896e-06, "loss": 0.8322, "step": 183 },
    { "epoch": 0.4435516271595018, "grad_norm": 0.6719093322753906, "learning_rate": 9.931318361214764e-06, "loss": 0.8834, "step": 184 },
    { "epoch": 0.4459622338288469, "grad_norm": 0.6736400127410889, "learning_rate": 9.92897601385478e-06, "loss": 0.8394, "step": 185 },
    { "epoch": 0.44837284049819204, "grad_norm": 0.6415509581565857, "learning_rate": 9.926594676801615e-06, "loss": 0.8245, "step": 186 },
    { "epoch": 0.45078344716753715, "grad_norm": 0.7117191553115845, "learning_rate": 9.92417436889237e-06, "loss": 0.8252, "step": 187 },
    { "epoch": 0.45319405383688227, "grad_norm": 0.693242609500885, "learning_rate": 9.921715109272413e-06, "loss": 0.886, "step": 188 },
    { "epoch": 0.4556046605062274, "grad_norm": 0.7100809812545776, "learning_rate": 9.919216917395234e-06, "loss": 0.8462, "step": 189 },
    { "epoch": 0.4580152671755725, "grad_norm": 0.6412146091461182, "learning_rate": 9.916679813022284e-06, "loss": 0.8802, "step": 190 },
    { "epoch": 0.4604258738449176, "grad_norm": 0.6167450547218323, "learning_rate": 9.91410381622283e-06, "loss": 0.8365, "step": 191 },
    { "epoch": 0.46283648051426274, "grad_norm": 0.6116438508033752, "learning_rate": 9.911488947373787e-06, "loss": 0.8444, "step": 192 },
    { "epoch": 0.46524708718360785, "grad_norm": 0.7030880451202393, "learning_rate": 9.908835227159559e-06, "loss": 0.8827, "step": 193 },
    { "epoch": 0.46765769385295297, "grad_norm": 0.6337934136390686, "learning_rate": 9.906142676571874e-06, "loss": 0.8591, "step": 194 },
    { "epoch": 0.4700683005222981, "grad_norm": 0.6397708058357239, "learning_rate": 9.903411316909626e-06, "loss": 0.8425, "step": 195 },
    { "epoch": 0.4724789071916432, "grad_norm": 0.6162590384483337, "learning_rate": 9.900641169778695e-06, "loss": 0.8382, "step": 196 },
    { "epoch": 0.4748895138609883, "grad_norm": 0.7089294195175171, "learning_rate": 9.897832257091787e-06, "loss": 0.82, "step": 197 },
    { "epoch": 0.4773001205303335, "grad_norm": 0.6308555006980896, "learning_rate": 9.894984601068248e-06, "loss": 0.8321, "step": 198 },
    { "epoch": 0.4797107271996786, "grad_norm": 0.7055140733718872, "learning_rate": 9.892098224233901e-06, "loss": 0.8574, "step": 199 },
    { "epoch": 0.4821213338690237, "grad_norm": 0.6801459789276123, "learning_rate": 9.889173149420864e-06, "loss": 0.8264, "step": 200 },
    { "epoch": 0.48453194053836884, "grad_norm": 0.6252164244651794, "learning_rate": 9.886209399767362e-06, "loss": 0.8595, "step": 201 },
    { "epoch": 0.48694254720771396, "grad_norm": 0.7609124183654785, "learning_rate": 9.883206998717551e-06, "loss": 0.8773, "step": 202 },
    { "epoch": 0.4893531538770591, "grad_norm": 0.6454911231994629, "learning_rate": 9.880165970021337e-06, "loss": 0.897, "step": 203 },
    { "epoch": 0.4917637605464042, "grad_norm": 0.6188477873802185, "learning_rate": 9.877086337734175e-06, "loss": 0.8276, "step": 204 },
    { "epoch": 0.4941743672157493, "grad_norm": 0.7367669343948364, "learning_rate": 9.87396812621689e-06, "loss": 0.8334, "step": 205 },
    { "epoch": 0.4965849738850944, "grad_norm": 0.6990851759910583, "learning_rate": 9.870811360135477e-06, "loss": 0.8334, "step": 206 },
    { "epoch": 0.49899558055443954, "grad_norm": 0.6364102363586426, "learning_rate": 9.867616064460914e-06, "loss": 0.8363, "step": 207 },
    { "epoch": 0.5014061872237846, "grad_norm": 0.7343196272850037, "learning_rate": 9.86438226446896e-06, "loss": 0.8406, "step": 208 },
    { "epoch": 0.5038167938931297, "grad_norm": 0.7273004651069641, "learning_rate": 9.86110998573995e-06, "loss": 0.8257, "step": 209 },
    { "epoch": 0.5062274005624748, "grad_norm": 0.7101020812988281, "learning_rate": 9.8577992541586e-06, "loss": 0.8309, "step": 210 },
    { "epoch": 0.50863800723182, "grad_norm": 0.8196221590042114, "learning_rate": 9.8544500959138e-06, "loss": 0.8353, "step": 211 },
    { "epoch": 0.5110486139011651, "grad_norm": 0.8963719010353088, "learning_rate": 9.851062537498403e-06, "loss": 0.8714, "step": 212 },
    { "epoch": 0.5134592205705103, "grad_norm": 0.7297502756118774, "learning_rate": 9.847636605709028e-06, "loss": 0.8901, "step": 213 },
    { "epoch": 0.5158698272398554, "grad_norm": 0.7607648372650146, "learning_rate": 9.844172327645829e-06, "loss": 0.8798, "step": 214 },
    { "epoch": 0.5182804339092005, "grad_norm": 0.9088441729545593, "learning_rate": 9.840669730712293e-06, "loss": 0.8583, "step": 215 },
    { "epoch": 0.5206910405785456, "grad_norm": 0.7284877896308899, "learning_rate": 9.83712884261503e-06, "loss": 0.8847, "step": 216 },
    { "epoch": 0.5231016472478908, "grad_norm": 0.6995714902877808, "learning_rate": 9.833549691363529e-06, "loss": 0.8279, "step": 217 },
    { "epoch": 0.5255122539172359, "grad_norm": 0.7118185758590698, "learning_rate": 9.829932305269964e-06, "loss": 0.8622, "step": 218 },
    { "epoch": 0.527922860586581, "grad_norm": 0.6980568766593933, "learning_rate": 9.826276712948957e-06, "loss": 0.836, "step": 219 },
    { "epoch": 0.5303334672559261, "grad_norm": 0.6804867386817932, "learning_rate": 9.822582943317346e-06, "loss": 0.9001, "step": 220 },
    { "epoch": 0.5327440739252712, "grad_norm": 0.6723496317863464, "learning_rate": 9.818851025593972e-06, "loss": 0.8552, "step": 221 },
    { "epoch": 0.5351546805946163, "grad_norm": 0.6647019386291504, "learning_rate": 9.815080989299433e-06, "loss": 0.8125, "step": 222 },
    { "epoch": 0.5375652872639615, "grad_norm": 0.6050721406936646, "learning_rate": 9.811272864255853e-06, "loss": 0.8449, "step": 223 },
    { "epoch": 0.5399758939333066, "grad_norm": 0.61509770154953, "learning_rate": 9.807426680586663e-06, "loss": 0.8162, "step": 224 },
    { "epoch": 0.5423865006026517, "grad_norm": 0.6345648169517517, "learning_rate": 9.803542468716335e-06, "loss": 0.852, "step": 225 },
    { "epoch": 0.5447971072719968, "grad_norm": 0.7016398310661316, "learning_rate": 9.799620259370164e-06, "loss": 0.8968, "step": 226 },
    { "epoch": 0.5472077139413419, "grad_norm": 0.6513635516166687, "learning_rate": 9.795660083574012e-06, "loss": 0.8157, "step": 227 },
    { "epoch": 0.549618320610687, "grad_norm": 0.7561305165290833, "learning_rate": 9.791661972654069e-06, "loss": 0.8856, "step": 228 },
    { "epoch": 0.5520289272800322, "grad_norm": 0.7972825169563293, "learning_rate": 9.787625958236606e-06, "loss": 0.8519, "step": 229 },
    { "epoch": 0.5544395339493773, "grad_norm": 0.7326191663742065, "learning_rate": 9.783552072247717e-06, "loss": 0.832, "step": 230 },
    { "epoch": 0.5568501406187224, "grad_norm": 0.5969269275665283, "learning_rate": 9.779440346913073e-06, "loss": 0.8385, "step": 231 },
    { "epoch": 0.5592607472880675, "grad_norm": 0.8821382522583008, "learning_rate": 9.775290814757672e-06, "loss": 0.8612, "step": 232 },
    { "epoch": 0.5616713539574126, "grad_norm": 0.6894771456718445, "learning_rate": 9.771103508605563e-06, "loss": 0.8681, "step": 233 },
    { "epoch": 0.5640819606267578, "grad_norm": 0.6771490573883057, "learning_rate": 9.766878461579613e-06, "loss": 0.8352, "step": 234 },
    { "epoch": 0.5664925672961029, "grad_norm": 0.6654596328735352, "learning_rate": 9.76261570710122e-06, "loss": 0.8647, "step": 235 },
    { "epoch": 0.568903173965448, "grad_norm": 0.7922803163528442, "learning_rate": 9.758315278890062e-06, "loss": 0.8771, "step": 236 },
    { "epoch": 0.5713137806347931, "grad_norm": 0.6588435769081116, "learning_rate": 9.75397721096383e-06, "loss": 0.8489, "step": 237 },
    { "epoch": 0.5737243873041382, "grad_norm": 1.0128016471862793, "learning_rate": 9.749601537637956e-06, "loss": 0.8572, "step": 238 },
    { "epoch": 0.5761349939734833, "grad_norm": 0.8538505434989929, "learning_rate": 9.745188293525341e-06, "loss": 0.8538, "step": 239 },
    { "epoch": 0.5785456006428285, "grad_norm": 0.7448136806488037, "learning_rate": 9.74073751353608e-06, "loss": 0.8636, "step": 240 },
    { "epoch": 0.5809562073121736, "grad_norm": 0.9223302006721497, "learning_rate": 9.736249232877195e-06, "loss": 0.8513, "step": 241 },
    { "epoch": 0.5833668139815187, "grad_norm": 0.5682440400123596, "learning_rate": 9.731723487052344e-06, "loss": 0.8201, "step": 242 },
    { "epoch": 0.5857774206508638, "grad_norm": 0.7429246306419373, "learning_rate": 9.727160311861546e-06, "loss": 0.8508, "step": 243 },
    { "epoch": 0.5881880273202089, "grad_norm": 0.7460123896598816, "learning_rate": 9.722559743400899e-06, "loss": 0.8561, "step": 244 },
    { "epoch": 0.590598633989554, "grad_norm": 0.6176005601882935, "learning_rate": 9.717921818062294e-06, "loss": 0.8359, "step": 245 },
    { "epoch": 0.5930092406588992, "grad_norm": 0.6871511936187744, "learning_rate": 9.713246572533125e-06, "loss": 0.8682, "step": 246 },
    { "epoch": 0.5954198473282443, "grad_norm": 0.731232225894928, "learning_rate": 9.708534043796001e-06, "loss": 0.8584, "step": 247 },
    { "epoch": 0.5978304539975894, "grad_norm": 0.6314242482185364, "learning_rate": 9.703784269128449e-06, "loss": 0.8193, "step": 248 },
    { "epoch": 0.6002410606669345, "grad_norm": 0.748142659664154, "learning_rate": 9.698997286102627e-06, "loss": 0.8422, "step": 249 },
    { "epoch": 0.6026516673362796, "grad_norm": 0.5373815298080444, "learning_rate": 9.694173132585021e-06, "loss": 0.8267, "step": 250 },
    { "epoch": 0.6050622740056247, "grad_norm": 0.7000875473022461, "learning_rate": 9.689311846736144e-06, "loss": 0.8283, "step": 251 },
    { "epoch": 0.6074728806749699, "grad_norm": 0.6611809730529785, "learning_rate": 9.684413467010246e-06, "loss": 0.8126, "step": 252 },
    { "epoch": 0.609883487344315, "grad_norm": 0.6031693816184998, "learning_rate": 9.679478032154986e-06, "loss": 0.8318, "step": 253 },
    { "epoch": 0.6122940940136601, "grad_norm": 0.8515450954437256, "learning_rate": 9.674505581211153e-06, "loss": 0.8713, "step": 254 },
    { "epoch": 0.6147047006830052, "grad_norm": 0.5968319773674011, "learning_rate": 9.669496153512338e-06, "loss": 0.8357, "step": 255 },
    { "epoch": 0.6171153073523503, "grad_norm": 0.6999500393867493, "learning_rate": 9.664449788684636e-06, "loss": 0.8438, "step": 256 },
    { "epoch": 0.6195259140216954, "grad_norm": 0.6063684225082397, "learning_rate": 9.659366526646316e-06, "loss": 0.8455, "step": 257 },
    { "epoch": 0.6219365206910406, "grad_norm": 0.5711807012557983, "learning_rate": 9.654246407607522e-06, "loss": 0.8414, "step": 258 },
    { "epoch": 0.6243471273603857, "grad_norm": 0.6388696432113647, "learning_rate": 9.649089472069948e-06, "loss": 0.8351, "step": 259 },
    { "epoch": 0.6267577340297308, "grad_norm": 0.621222972869873, "learning_rate": 9.643895760826514e-06, "loss": 0.8628, "step": 260 },
    { "epoch": 0.6291683406990759, "grad_norm": 0.6475227475166321, "learning_rate": 9.63866531496105e-06, "loss": 0.8221, "step": 261 },
    { "epoch": 0.631578947368421, "grad_norm": 0.7363802790641785, "learning_rate": 9.633398175847968e-06, "loss": 0.867, "step": 262 },
    { "epoch": 0.6339895540377661, "grad_norm": 0.6129623055458069, "learning_rate": 9.628094385151931e-06, "loss": 0.865, "step": 263 },
    { "epoch": 0.6364001607071113, "grad_norm": 0.7092401385307312, "learning_rate": 9.62275398482753e-06, "loss": 0.8541, "step": 264 },
    { "epoch": 0.6388107673764564, "grad_norm": 0.6516185402870178, "learning_rate": 9.617377017118953e-06, "loss": 0.8549, "step": 265 },
    { "epoch": 0.6412213740458015, "grad_norm": 0.604061484336853, "learning_rate": 9.61196352455964e-06, "loss": 0.8562, "step": 266 },
    { "epoch": 0.6436319807151466, "grad_norm": 0.5958904027938843, "learning_rate": 9.606513549971958e-06, "loss": 0.8444, "step": 267 },
    { "epoch": 0.6460425873844917, "grad_norm": 0.7973148226737976, "learning_rate": 9.601027136466854e-06, "loss": 0.857, "step": 268 },
    { "epoch": 0.6484531940538368, "grad_norm": 0.5656829476356506, "learning_rate": 9.595504327443526e-06, "loss": 0.7811, "step": 269 },
    { "epoch": 0.650863800723182, "grad_norm": 0.7320337891578674, "learning_rate": 9.58994516658906e-06, "loss": 0.8818, "step": 270 },
    { "epoch": 0.6532744073925271, "grad_norm": 0.6266824007034302, "learning_rate": 9.584349697878106e-06, "loss": 0.8544, "step": 271 },
    { "epoch": 0.6556850140618722, "grad_norm": 0.6036157011985779, "learning_rate": 9.578717965572515e-06, "loss": 0.7667, "step": 272 },
    { "epoch": 0.6580956207312173, "grad_norm": 0.7117894887924194, "learning_rate": 9.573050014220995e-06, "loss": 0.8645, "step": 273 },
    { "epoch": 0.6605062274005625, "grad_norm": 0.7956337332725525, "learning_rate": 9.56734588865876e-06, "loss": 0.868, "step": 274 },
    { "epoch": 0.6629168340699076, "grad_norm": 0.607756495475769, "learning_rate": 9.56160563400717e-06, "loss": 0.8305, "step": 275 },
    { "epoch": 0.6653274407392528, "grad_norm": 0.7335057854652405, "learning_rate": 9.555829295673376e-06, "loss": 0.8656, "step": 276 },
    { "epoch": 0.6677380474085979, "grad_norm": 0.7326974272727966, "learning_rate": 9.550016919349964e-06, "loss": 0.8592, "step": 277 },
    { "epoch": 0.670148654077943, "grad_norm": 0.6197059154510498, "learning_rate": 9.544168551014594e-06, "loss": 0.8399, "step": 278 },
    { "epoch": 0.6725592607472881, "grad_norm": 0.7401570081710815, "learning_rate": 9.53828423692963e-06, "loss": 0.8271, "step": 279 },
    { "epoch": 0.6749698674166332, "grad_norm": 0.7390724420547485, "learning_rate": 9.532364023641776e-06, "loss": 0.8468, "step": 280 },
    { "epoch": 0.6773804740859783, "grad_norm": 0.7907707691192627, "learning_rate": 9.526407957981714e-06, "loss": 0.8412, "step": 281 },
    { "epoch": 0.6797910807553235, "grad_norm": 0.6936007738113403, "learning_rate": 9.520416087063728e-06, "loss": 0.8224, "step": 282 },
    { "epoch": 0.6822016874246686, "grad_norm": 0.8337876796722412, "learning_rate": 9.514388458285332e-06, "loss": 0.8083, "step": 283 },
    { "epoch": 0.6846122940940137, "grad_norm": 0.5903828740119934, "learning_rate": 9.508325119326893e-06, "loss": 0.833, "step": 284 },
    { "epoch": 0.6870229007633588, "grad_norm": 0.5665065050125122, "learning_rate": 9.502226118151263e-06, "loss": 0.832, "step": 285 },
    { "epoch": 0.6894335074327039, "grad_norm": 0.7387179136276245, "learning_rate": 9.496091503003385e-06, "loss": 0.8727, "step": 286 },
    { "epoch": 0.691844114102049, "grad_norm": 0.5914328694343567, "learning_rate": 9.489921322409921e-06, "loss": 0.8251, "step": 287 },
    { "epoch": 0.6942547207713942, "grad_norm": 0.5691410303115845, "learning_rate": 9.483715625178874e-06, "loss": 0.8397, "step": 288 },
    { "epoch": 0.6966653274407393, "grad_norm": 0.687419593334198, "learning_rate": 9.477474460399184e-06, "loss": 0.8521, "step": 289 },
    { "epoch": 0.6990759341100844, "grad_norm": 0.6092253923416138, "learning_rate": 9.471197877440358e-06, "loss": 0.8508, "step": 290 },
    { "epoch": 0.7014865407794295, "grad_norm": 0.6598702073097229, "learning_rate": 9.464885925952066e-06, "loss": 0.8491, "step": 291 },
    { "epoch": 0.7038971474487746, "grad_norm": 0.5904880166053772, "learning_rate": 9.458538655863757e-06, "loss": 0.82, "step": 292 },
    { "epoch": 0.7063077541181197, "grad_norm": 0.5640713572502136, "learning_rate": 9.45215611738426e-06, "loss": 0.8126, "step": 293 },
    { "epoch": 0.7087183607874649, "grad_norm": 0.6432963609695435, "learning_rate": 9.445738361001387e-06, "loss": 0.859, "step": 294 },
    { "epoch": 0.71112896745681, "grad_norm": 0.5517960786819458, "learning_rate": 9.439285437481537e-06, "loss": 0.8392, "step": 295 },
    { "epoch": 0.7135395741261551, "grad_norm": 0.63838791847229, "learning_rate": 9.432797397869284e-06, "loss": 0.8457, "step": 296 },
    { "epoch": 0.7159501807955002, "grad_norm": 0.5761669874191284, "learning_rate": 9.426274293486993e-06, "loss": 0.8524, "step": 297 },
    { "epoch": 0.7183607874648453, "grad_norm": 0.6037600636482239, "learning_rate": 9.41971617593439e-06, "loss": 0.8503, "step": 298 },
    { "epoch": 0.7207713941341904, "grad_norm": 0.6540753841400146, "learning_rate": 9.413123097088178e-06, "loss": 0.8565, "step": 299 },
    { "epoch": 0.7231820008035356, "grad_norm": 0.679303765296936, "learning_rate": 9.4064951091016e-06, "loss": 0.8388, "step": 300 },
    { "epoch": 0.7255926074728807, "grad_norm": 0.7382025122642517, "learning_rate": 9.399832264404052e-06, "loss": 0.8398, "step": 301 },
    { "epoch": 0.7280032141422258, "grad_norm": 0.59201979637146, "learning_rate": 9.393134615700654e-06, "loss": 0.862, "step": 302 },
    { "epoch": 0.7304138208115709, "grad_norm": 0.6300458312034607, "learning_rate": 9.386402215971835e-06, "loss": 0.8409, "step": 303 },
    { "epoch": 0.732824427480916, "grad_norm": 0.7235124707221985, "learning_rate": 9.379635118472918e-06, "loss": 0.8404, "step": 304 },
    { "epoch": 0.7352350341502611, "grad_norm": 0.5660662055015564, "learning_rate": 9.37283337673369e-06, "loss": 0.8283, "step": 305 },
    { "epoch": 0.7376456408196063, "grad_norm": 0.6544215083122253, "learning_rate": 9.365997044557992e-06, "loss": 0.8581, "step": 306 },
    { "epoch": 0.7400562474889514, "grad_norm": 0.5648017525672913, "learning_rate": 9.35912617602328e-06, "loss": 0.8193, "step": 307 },
    { "epoch": 0.7424668541582965, "grad_norm": 0.5997771620750427, "learning_rate": 9.352220825480205e-06, "loss": 0.8161, "step": 308 },
    { "epoch": 0.7448774608276416, "grad_norm": 0.7944521903991699, "learning_rate": 9.345281047552184e-06, "loss": 0.8277, "step": 309 },
    { "epoch": 0.7472880674969867, "grad_norm": 0.7135575413703918, "learning_rate": 9.33830689713496e-06, "loss": 0.8611, "step": 310 },
    { "epoch": 0.7496986741663318, "grad_norm": 0.725627601146698, "learning_rate": 9.331298429396174e-06, "loss": 0.8211, "step": 311 },
    { "epoch": 0.752109280835677, "grad_norm": 0.7234640121459961, "learning_rate": 9.32425569977493e-06, "loss": 0.8243, "step": 312 },
    { "epoch": 0.7545198875050221, "grad_norm": 0.5590807199478149, "learning_rate": 9.317178763981354e-06, "loss": 0.8413, "step": 313 },
    { "epoch": 0.7569304941743672, "grad_norm": 0.6750354170799255, "learning_rate": 9.31006767799615e-06, "loss": 0.8344, "step": 314 },
    { "epoch": 0.7593411008437123, "grad_norm": 0.5902611613273621, "learning_rate": 9.302922498070161e-06, "loss": 0.8472, "step": 315 },
    { "epoch": 0.7617517075130574, "grad_norm": 0.5739753246307373, "learning_rate": 9.295743280723928e-06, "loss": 0.8692, "step": 316 },
    { "epoch": 0.7641623141824025, "grad_norm": 0.6568316221237183, "learning_rate": 9.288530082747231e-06, "loss": 0.8055, "step": 317 },
    { "epoch": 0.7665729208517477, "grad_norm": 0.5855989456176758, "learning_rate": 9.281282961198651e-06, "loss": 0.8115, "step": 318 },
    { "epoch": 0.7689835275210928, "grad_norm": 0.5146446228027344, "learning_rate": 9.274001973405117e-06, "loss": 0.8306, "step": 319 },
    { "epoch": 0.7713941341904379, "grad_norm": 0.7348683476448059, "learning_rate": 9.266687176961446e-06, "loss": 0.8611, "step": 320 },
    { "epoch": 0.773804740859783, "grad_norm": 0.599945068359375, "learning_rate": 9.259338629729892e-06, "loss": 0.8675, "step": 321 },
    { "epoch": 0.7762153475291281, "grad_norm": 0.5286161303520203, "learning_rate": 9.251956389839694e-06, "loss": 0.8516, "step": 322 },
    { "epoch": 0.7786259541984732, "grad_norm": 0.5764496922492981, "learning_rate": 9.2445405156866e-06, "loss": 0.842, "step": 323 },
    { "epoch": 0.7810365608678184, "grad_norm": 0.5618358254432678, "learning_rate": 9.237091065932425e-06, "loss": 0.8319, "step": 324 },
    { "epoch": 0.7834471675371635, "grad_norm": 0.5656780004501343, "learning_rate": 9.22960809950457e-06, "loss": 0.8773, "step": 325 },
    { "epoch": 0.7858577742065086, "grad_norm": 0.6296916604042053, "learning_rate": 9.222091675595566e-06, "loss": 0.8201, "step": 326 },
    { "epoch": 0.7882683808758537, "grad_norm": 0.5689228773117065, "learning_rate": 9.214541853662605e-06, "loss": 0.8149, "step": 327 },
    { "epoch": 0.7906789875451988, "grad_norm": 0.6664914488792419, "learning_rate": 9.206958693427063e-06, "loss": 0.8184, "step": 328 },
    { "epoch": 0.793089594214544, "grad_norm": 0.6553746461868286, "learning_rate": 9.199342254874037e-06, "loss": 0.873, "step": 329 },
    { "epoch": 0.7955002008838891, "grad_norm": 0.5962517261505127, "learning_rate": 9.19169259825186e-06, "loss": 0.806, "step": 330 },
    { "epoch": 0.7979108075532342, "grad_norm": 0.7290959358215332, "learning_rate": 9.184009784071635e-06, "loss": 0.8866, "step": 331 },
    { "epoch": 0.8003214142225793, "grad_norm": 0.5864709615707397, "learning_rate": 9.176293873106747e-06, "loss": 0.8356, "step": 332 },
    { "epoch": 0.8027320208919244, "grad_norm": 0.6049401164054871, "learning_rate": 9.168544926392388e-06, "loss": 0.8064, "step": 333 },
    { "epoch": 0.8051426275612696, "grad_norm": 0.6348953247070312, "learning_rate": 9.160763005225078e-06, "loss": 0.8574, "step": 334 },
    { "epoch": 0.8075532342306148, "grad_norm": 0.6881975531578064, "learning_rate": 9.152948171162168e-06, "loss": 0.8103, "step": 335 },
    { "epoch": 0.8099638408999599, "grad_norm": 0.6783849000930786, "learning_rate": 9.14510048602136e-06, "loss": 0.8533, "step": 336 },
    { "epoch": 0.812374447569305, "grad_norm": 0.5869353413581848, "learning_rate": 9.137220011880226e-06, "loss": 0.8219, "step": 337 },
    { "epoch": 0.8147850542386501, "grad_norm": 0.7382720708847046, "learning_rate": 9.129306811075698e-06, "loss": 0.8277, "step": 338 },
    { "epoch": 0.8171956609079952, "grad_norm": 0.648872435092926, "learning_rate": 9.121360946203593e-06, "loss": 0.7859, "step": 339 },
    { "epoch": 0.8196062675773403, "grad_norm": 0.7907251119613647, "learning_rate": 9.113382480118108e-06, "loss": 0.827, "step": 340 },
    { "epoch": 0.8220168742466855, "grad_norm": 0.6970083117485046, "learning_rate": 9.105371475931328e-06, "loss": 0.8563, "step": 341 },
    { "epoch": 0.8244274809160306, "grad_norm": 0.8111920952796936, "learning_rate": 9.097327997012723e-06, "loss": 0.8377, "step": 342 },
    { "epoch": 0.8268380875853757, "grad_norm": 0.7246480584144592, "learning_rate": 9.089252106988645e-06, "loss": 0.8585, "step": 343 },
    { "epoch": 0.8292486942547208, "grad_norm": 0.8073788285255432, "learning_rate": 9.081143869741832e-06, "loss": 0.8267, "step": 344 },
    { "epoch": 0.8316593009240659, "grad_norm": 0.7655984163284302, "learning_rate": 9.073003349410895e-06, "loss": 0.8494, "step": 345 },
    { "epoch": 0.834069907593411, "grad_norm": 0.6647078990936279, "learning_rate": 9.064830610389818e-06, "loss": 0.7902, "step": 346 },
    { "epoch": 0.8364805142627562, "grad_norm": 0.6343231201171875, "learning_rate": 9.056625717327438e-06, "loss": 0.8275, "step": 347 },
    { "epoch": 0.8388911209321013, "grad_norm": 0.6518645882606506, "learning_rate": 9.048388735126949e-06, "loss": 0.8286, "step": 348 },
    { "epoch": 0.8413017276014464, "grad_norm": 0.6569293737411499, "learning_rate": 9.04011972894537e-06, "loss": 0.8504, "step": 349 },
    { "epoch": 0.8437123342707915, "grad_norm": 0.5841959118843079, "learning_rate": 9.03181876419305e-06, "loss": 0.8606, "step": 350 },
    { "epoch": 0.8461229409401366, "grad_norm": 0.667956531047821, "learning_rate": 9.023485906533132e-06, "loss": 0.8094, "step": 351 },
    { "epoch": 0.8485335476094817, "grad_norm": 0.7535767555236816, "learning_rate": 9.01512122188104e-06, "loss": 0.8311, "step": 352 },
    { "epoch": 0.8509441542788269, "grad_norm": 0.6863349080085754, "learning_rate": 9.00672477640397e-06, "loss": 0.8607, "step": 353 },
    { "epoch": 0.853354760948172, "grad_norm": 0.674609899520874, "learning_rate": 8.998296636520342e-06, "loss": 0.8651, "step": 354 },
    { "epoch": 0.8557653676175171, "grad_norm": 0.7082617282867432, "learning_rate": 8.989836868899303e-06, "loss": 0.8381, "step": 355 },
    { "epoch": 0.8581759742868622, "grad_norm": 0.7260697484016418, "learning_rate": 8.98134554046017e-06, "loss": 0.8422, "step": 356 },
    { "epoch": 0.8605865809562073, "grad_norm": 0.7050870656967163, "learning_rate": 8.972822718371926e-06, "loss": 0.8241, "step": 357 },
    { "epoch": 0.8629971876255524, "grad_norm": 0.7213965654373169, "learning_rate": 8.964268470052676e-06, "loss": 0.8212, "step": 358 },
    { "epoch": 0.8654077942948976, "grad_norm": 0.6910438537597656, "learning_rate": 8.955682863169113e-06, "loss": 0.8364, "step": 359 },
    { "epoch": 0.8678184009642427, "grad_norm": 0.5793609023094177, "learning_rate": 8.94706596563599e-06, "loss": 0.8222, "step": 360 },
    { "epoch": 0.8702290076335878, "grad_norm": 0.6951830983161926, "learning_rate": 8.938417845615575e-06, "loss": 0.8129, "step": 361 },
    { "epoch": 0.8726396143029329, "grad_norm": 0.6757342219352722, "learning_rate": 8.929738571517117e-06, "loss": 0.836, "step": 362 },
    { "epoch": 0.875050220972278, "grad_norm": 0.665619432926178, "learning_rate": 8.921028211996304e-06, "loss": 0.8026, "step": 363 },
    { "epoch": 0.8774608276416231, "grad_norm": 0.6058963537216187, "learning_rate": 8.912286835954716e-06, "loss": 0.826, "step": 364 },
    { "epoch": 0.8798714343109683, "grad_norm": 0.6532434225082397, "learning_rate": 8.903514512539286e-06, "loss": 0.8734, "step": 365 },
    { "epoch": 0.8822820409803134, "grad_norm": 0.6264218688011169, "learning_rate": 8.89471131114175e-06, "loss": 0.8274, "step": 366 },
    { "epoch": 0.8846926476496585, "grad_norm": 0.6239394545555115, "learning_rate": 8.885877301398098e-06, "loss": 0.8467, "step": 367 },
    { "epoch": 0.8871032543190036, "grad_norm": 0.5937274098396301, "learning_rate": 8.87701255318802e-06, "loss": 0.8086, "step": 368 },
    { "epoch": 0.8895138609883487, "grad_norm": 0.6665049195289612, "learning_rate": 8.868117136634362e-06, "loss": 0.8333, "step": 369 },
    { "epoch": 0.8919244676576938, "grad_norm": 0.6695848107337952, "learning_rate": 8.859191122102561e-06, "loss": 0.8398, "step": 370 },
    { "epoch": 0.894335074327039, "grad_norm": 0.6585851311683655, "learning_rate": 8.850234580200098e-06, "loss": 0.8467, "step": 371 },
    { "epoch": 0.8967456809963841, "grad_norm": 0.5577541589736938, "learning_rate": 8.84124758177593e-06, "loss": 0.8054, "step": 372 },
    { "epoch": 0.8991562876657292, "grad_norm": 0.6449745893478394, "learning_rate": 8.832230197919938e-06, "loss": 0.8025, "step": 373 },
    { "epoch": 0.9015668943350743, "grad_norm": 0.6161853671073914, "learning_rate": 8.823182499962359e-06, "loss": 0.8204, "step": 374 },
    { "epoch": 0.9039775010044194, "grad_norm": 0.6511313915252686, "learning_rate": 8.814104559473224e-06, "loss": 0.8105, "step": 375 },
    { "epoch": 0.9063881076737645,
|
"grad_norm": 0.5244183540344238, |
|
"learning_rate": 8.804996448261791e-06, |
|
"loss": 0.8188, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.9087987143431097, |
|
"grad_norm": 0.6332988142967224, |
|
"learning_rate": 8.795858238375981e-06, |
|
"loss": 0.8425, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.9112093210124548, |
|
"grad_norm": 0.7205396294593811, |
|
"learning_rate": 8.7866900021018e-06, |
|
"loss": 0.8486, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.9136199276817999, |
|
"grad_norm": 0.5268656611442566, |
|
"learning_rate": 8.777491811962774e-06, |
|
"loss": 0.818, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.916030534351145, |
|
"grad_norm": 0.6341696977615356, |
|
"learning_rate": 8.768263740719375e-06, |
|
"loss": 0.8279, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.9184411410204901, |
|
"grad_norm": 0.562234103679657, |
|
"learning_rate": 8.759005861368437e-06, |
|
"loss": 0.8424, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.9208517476898352, |
|
"grad_norm": 0.7172342538833618, |
|
"learning_rate": 8.749718247142594e-06, |
|
"loss": 0.8401, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.9232623543591804, |
|
"grad_norm": 0.5809625387191772, |
|
"learning_rate": 8.740400971509686e-06, |
|
"loss": 0.8562, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.9256729610285255, |
|
"grad_norm": 0.5465041995048523, |
|
"learning_rate": 8.731054108172185e-06, |
|
"loss": 0.8593, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.9280835676978706, |
|
"grad_norm": 0.5923233032226562, |
|
"learning_rate": 8.721677731066612e-06, |
|
"loss": 0.8146, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.9304941743672157, |
|
"grad_norm": 0.5705261826515198, |
|
"learning_rate": 8.71227191436295e-06, |
|
"loss": 0.812, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.9329047810365608, |
|
"grad_norm": 0.5386529564857483, |
|
"learning_rate": 8.702836732464054e-06, |
|
"loss": 0.8563, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.9353153877059059, |
|
"grad_norm": 0.6412521600723267, |
|
"learning_rate": 8.693372260005078e-06, |
|
"loss": 0.8344, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.9377259943752511, |
|
"grad_norm": 0.5760740637779236, |
|
"learning_rate": 8.68387857185286e-06, |
|
"loss": 0.8414, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.9401366010445962, |
|
"grad_norm": 0.5588881373405457, |
|
"learning_rate": 8.674355743105352e-06, |
|
"loss": 0.791, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.9425472077139413, |
|
"grad_norm": 0.6648942828178406, |
|
"learning_rate": 8.66480384909101e-06, |
|
"loss": 0.8229, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.9449578143832864, |
|
"grad_norm": 0.6803313493728638, |
|
"learning_rate": 8.655222965368214e-06, |
|
"loss": 0.8168, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.9473684210526315, |
|
"grad_norm": 0.5815809369087219, |
|
"learning_rate": 8.64561316772465e-06, |
|
"loss": 0.8507, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.9497790277219766, |
|
"grad_norm": 0.5295128226280212, |
|
"learning_rate": 8.635974532176732e-06, |
|
"loss": 0.8116, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.9521896343913219, |
|
"grad_norm": 0.6963701844215393, |
|
"learning_rate": 8.626307134968986e-06, |
|
"loss": 0.8203, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.954600241060667, |
|
"grad_norm": 0.5878770351409912, |
|
"learning_rate": 8.616611052573451e-06, |
|
"loss": 0.824, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.9570108477300121, |
|
"grad_norm": 0.6232076287269592, |
|
"learning_rate": 8.606886361689078e-06, |
|
"loss": 0.7862, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.9594214543993572, |
|
"grad_norm": 0.5721035599708557, |
|
"learning_rate": 8.597133139241113e-06, |
|
"loss": 0.8339, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.9618320610687023, |
|
"grad_norm": 0.5648634433746338, |
|
"learning_rate": 8.587351462380506e-06, |
|
"loss": 0.8406, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.9642426677380475, |
|
"grad_norm": 0.7098273038864136, |
|
"learning_rate": 8.577541408483278e-06, |
|
"loss": 0.8228, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.9666532744073926, |
|
"grad_norm": 0.6049249172210693, |
|
"learning_rate": 8.567703055149928e-06, |
|
"loss": 0.8137, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.9690638810767377, |
|
"grad_norm": 0.6870529651641846, |
|
"learning_rate": 8.557836480204812e-06, |
|
"loss": 0.8396, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.9714744877460828, |
|
"grad_norm": 0.6321452856063843, |
|
"learning_rate": 8.547941761695525e-06, |
|
"loss": 0.8142, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.9738850944154279, |
|
"grad_norm": 0.5478300452232361, |
|
"learning_rate": 8.538018977892288e-06, |
|
"loss": 0.8289, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.976295701084773, |
|
"grad_norm": 0.6476640105247498, |
|
"learning_rate": 8.528068207287327e-06, |
|
"loss": 0.8766, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.9787063077541182, |
|
"grad_norm": 0.6451775431632996, |
|
"learning_rate": 8.51808952859425e-06, |
|
"loss": 0.8072, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.9811169144234633, |
|
"grad_norm": 0.6655381321907043, |
|
"learning_rate": 8.50808302074743e-06, |
|
"loss": 0.8055, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.9835275210928084, |
|
"grad_norm": 0.6143584251403809, |
|
"learning_rate": 8.498048762901371e-06, |
|
"loss": 0.8118, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.9859381277621535, |
|
"grad_norm": 0.7594701647758484, |
|
"learning_rate": 8.487986834430096e-06, |
|
"loss": 0.845, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.9883487344314986, |
|
"grad_norm": 0.7051988244056702, |
|
"learning_rate": 8.477897314926505e-06, |
|
"loss": 0.8328, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9907593411008437, |
|
"grad_norm": 0.575385332107544, |
|
"learning_rate": 8.467780284201752e-06, |
|
"loss": 0.8059, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.9931699477701889, |
|
"grad_norm": 0.732379138469696, |
|
"learning_rate": 8.457635822284617e-06, |
|
"loss": 0.8285, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.995580554439534, |
|
"grad_norm": 0.6589808464050293, |
|
"learning_rate": 8.447464009420863e-06, |
|
"loss": 0.8257, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.9979911611088791, |
|
"grad_norm": 0.6496813893318176, |
|
"learning_rate": 8.437264926072615e-06, |
|
"loss": 0.8677, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.0008035355564484, |
|
"grad_norm": 0.8550748825073242, |
|
"learning_rate": 8.427038652917707e-06, |
|
"loss": 1.0063, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.0032141422257934, |
|
"grad_norm": 0.575027585029602, |
|
"learning_rate": 8.41678527084906e-06, |
|
"loss": 0.7551, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.0056247488951386, |
|
"grad_norm": 0.8211678266525269, |
|
"learning_rate": 8.406504860974031e-06, |
|
"loss": 0.7853, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.0080353555644836, |
|
"grad_norm": 0.6503652930259705, |
|
"learning_rate": 8.396197504613774e-06, |
|
"loss": 0.8817, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.0104459622338289, |
|
"grad_norm": 0.5204470753669739, |
|
"learning_rate": 8.385863283302603e-06, |
|
"loss": 0.6226, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.0128565689031739, |
|
"grad_norm": 0.8931419253349304, |
|
"learning_rate": 8.375502278787337e-06, |
|
"loss": 0.8863, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.015267175572519, |
|
"grad_norm": 0.5655834078788757, |
|
"learning_rate": 8.365114573026658e-06, |
|
"loss": 0.7197, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.0176777822418641, |
|
"grad_norm": 0.7451320886611938, |
|
"learning_rate": 8.354700248190469e-06, |
|
"loss": 0.8099, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.0200883889112093, |
|
"grad_norm": 0.7233074903488159, |
|
"learning_rate": 8.344259386659231e-06, |
|
"loss": 0.7636, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.0224989955805543, |
|
"grad_norm": 0.5538437962532043, |
|
"learning_rate": 8.33379207102332e-06, |
|
"loss": 0.7604, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.0249096022498996, |
|
"grad_norm": 0.7776824235916138, |
|
"learning_rate": 8.323298384082377e-06, |
|
"loss": 0.7708, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.0273202089192446, |
|
"grad_norm": 0.6362035870552063, |
|
"learning_rate": 8.312778408844642e-06, |
|
"loss": 0.7616, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.0297308155885898, |
|
"grad_norm": 0.7121257185935974, |
|
"learning_rate": 8.302232228526303e-06, |
|
"loss": 0.7419, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.0321414222579348, |
|
"grad_norm": 0.8088518381118774, |
|
"learning_rate": 8.291659926550845e-06, |
|
"loss": 0.8936, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.03455202892728, |
|
"grad_norm": 0.5297755002975464, |
|
"learning_rate": 8.281061586548375e-06, |
|
"loss": 0.7147, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.036962635596625, |
|
"grad_norm": 0.7757960557937622, |
|
"learning_rate": 8.270437292354974e-06, |
|
"loss": 0.8377, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.0393732422659703, |
|
"grad_norm": 0.5733137130737305, |
|
"learning_rate": 8.25978712801203e-06, |
|
"loss": 0.7268, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.0417838489353153, |
|
"grad_norm": 0.5540242195129395, |
|
"learning_rate": 8.249111177765562e-06, |
|
"loss": 0.8372, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.0441944556046605, |
|
"grad_norm": 0.6394256949424744, |
|
"learning_rate": 8.238409526065575e-06, |
|
"loss": 0.737, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.0466050622740055, |
|
"grad_norm": 0.5981785655021667, |
|
"learning_rate": 8.227682257565375e-06, |
|
"loss": 0.835, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.0490156689433507, |
|
"grad_norm": 0.6546509861946106, |
|
"learning_rate": 8.216929457120903e-06, |
|
"loss": 0.7222, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.051426275612696, |
|
"grad_norm": 0.6063547134399414, |
|
"learning_rate": 8.20615120979007e-06, |
|
"loss": 0.8605, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.053836882282041, |
|
"grad_norm": 0.4858166575431824, |
|
"learning_rate": 8.195347600832073e-06, |
|
"loss": 0.7153, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.0562474889513862, |
|
"grad_norm": 0.7449772357940674, |
|
"learning_rate": 8.184518715706737e-06, |
|
"loss": 0.8272, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.0586580956207312, |
|
"grad_norm": 0.6343402862548828, |
|
"learning_rate": 8.173664640073817e-06, |
|
"loss": 0.8806, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.0610687022900764, |
|
"grad_norm": 0.5715517997741699, |
|
"learning_rate": 8.162785459792343e-06, |
|
"loss": 0.7498, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.0634793089594214, |
|
"grad_norm": 0.6444422006607056, |
|
"learning_rate": 8.151881260919924e-06, |
|
"loss": 0.6873, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.0658899156287667, |
|
"grad_norm": 0.7140711545944214, |
|
"learning_rate": 8.140952129712077e-06, |
|
"loss": 0.9237, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.0683005222981117, |
|
"grad_norm": 0.6303651928901672, |
|
"learning_rate": 8.129998152621539e-06, |
|
"loss": 0.7731, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.070711128967457, |
|
"grad_norm": 0.8040685057640076, |
|
"learning_rate": 8.119019416297586e-06, |
|
"loss": 0.8514, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.073121735636802, |
|
"grad_norm": 0.5904982686042786, |
|
"learning_rate": 8.108016007585353e-06, |
|
"loss": 0.7552, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.0755323423061471, |
|
"grad_norm": 0.6957026124000549, |
|
"learning_rate": 8.096988013525127e-06, |
|
"loss": 0.7255, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.0779429489754921, |
|
"grad_norm": 0.8129993081092834, |
|
"learning_rate": 8.085935521351688e-06, |
|
"loss": 0.9519, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.0803535556448374, |
|
"grad_norm": 0.506014883518219, |
|
"learning_rate": 8.074858618493593e-06, |
|
"loss": 0.6602, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.0827641623141824, |
|
"grad_norm": 0.7163325548171997, |
|
"learning_rate": 8.063757392572501e-06, |
|
"loss": 0.804, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.0851747689835276, |
|
"grad_norm": 0.5649906992912292, |
|
"learning_rate": 8.05263193140247e-06, |
|
"loss": 0.7554, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.0875853756528726, |
|
"grad_norm": 0.7463988065719604, |
|
"learning_rate": 8.041482322989268e-06, |
|
"loss": 0.8249, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.0899959823222178, |
|
"grad_norm": 0.6098147630691528, |
|
"learning_rate": 8.030308655529676e-06, |
|
"loss": 0.8185, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.0924065889915628, |
|
"grad_norm": 0.5633608102798462, |
|
"learning_rate": 8.019111017410785e-06, |
|
"loss": 0.787, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.094817195660908, |
|
"grad_norm": 0.6172834634780884, |
|
"learning_rate": 8.007889497209306e-06, |
|
"loss": 0.766, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.097227802330253, |
|
"grad_norm": 0.6198186278343201, |
|
"learning_rate": 7.996644183690863e-06, |
|
"loss": 0.7985, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.0996384089995983, |
|
"grad_norm": 0.605234682559967, |
|
"learning_rate": 7.985375165809291e-06, |
|
"loss": 0.8497, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.1020490156689433, |
|
"grad_norm": 0.5625818371772766, |
|
"learning_rate": 7.974082532705934e-06, |
|
"loss": 0.7294, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.1044596223382885, |
|
"grad_norm": 0.612360954284668, |
|
"learning_rate": 7.96276637370894e-06, |
|
"loss": 0.8164, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.1068702290076335, |
|
"grad_norm": 0.552894115447998, |
|
"learning_rate": 7.951426778332557e-06, |
|
"loss": 0.7126, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.1092808356769788, |
|
"grad_norm": 0.6905764937400818, |
|
"learning_rate": 7.940063836276417e-06, |
|
"loss": 0.8764, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.1116914423463238, |
|
"grad_norm": 0.5401421785354614, |
|
"learning_rate": 7.928677637424834e-06, |
|
"loss": 0.7787, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.114102049015669, |
|
"grad_norm": 0.6026103496551514, |
|
"learning_rate": 7.917268271846089e-06, |
|
"loss": 0.7603, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.116512655685014, |
|
"grad_norm": 0.7179415822029114, |
|
"learning_rate": 7.905835829791717e-06, |
|
"loss": 0.9309, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.1189232623543592, |
|
"grad_norm": 0.4643336236476898, |
|
"learning_rate": 7.894380401695804e-06, |
|
"loss": 0.6804, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.1213338690237042, |
|
"grad_norm": 0.8007449507713318, |
|
"learning_rate": 7.882902078174249e-06, |
|
"loss": 0.8848, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.1237444756930495, |
|
"grad_norm": 0.6069219708442688, |
|
"learning_rate": 7.871400950024074e-06, |
|
"loss": 0.7905, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.1261550823623945, |
|
"grad_norm": 0.6723071932792664, |
|
"learning_rate": 7.859877108222678e-06, |
|
"loss": 0.8149, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.1285656890317397, |
|
"grad_norm": 0.5855688452720642, |
|
"learning_rate": 7.848330643927146e-06, |
|
"loss": 0.7545, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.1309762957010847, |
|
"grad_norm": 0.666085422039032, |
|
"learning_rate": 7.836761648473509e-06, |
|
"loss": 0.7747, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.13338690237043, |
|
"grad_norm": 0.5126907825469971, |
|
"learning_rate": 7.825170213376021e-06, |
|
"loss": 0.7419, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.135797509039775, |
|
"grad_norm": 0.7818092107772827, |
|
"learning_rate": 7.813556430326446e-06, |
|
"loss": 0.8286, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.1382081157091202, |
|
"grad_norm": 0.6784226298332214, |
|
"learning_rate": 7.801920391193327e-06, |
|
"loss": 0.766, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.1406187223784652, |
|
"grad_norm": 0.6822251677513123, |
|
"learning_rate": 7.790262188021257e-06, |
|
"loss": 0.822, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.1430293290478104, |
|
"grad_norm": 0.7290881276130676, |
|
"learning_rate": 7.77858191303015e-06, |
|
"loss": 0.8315, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.1454399357171554, |
|
"grad_norm": 0.6667381525039673, |
|
"learning_rate": 7.766879658614526e-06, |
|
"loss": 0.7708, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.1478505423865006, |
|
"grad_norm": 0.6019648909568787, |
|
"learning_rate": 7.755155517342759e-06, |
|
"loss": 0.7824, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.1502611490558456, |
|
"grad_norm": 0.5805454254150391, |
|
"learning_rate": 7.743409581956354e-06, |
|
"loss": 0.7515, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.1526717557251909, |
|
"grad_norm": 0.6192907691001892, |
|
"learning_rate": 7.731641945369223e-06, |
|
"loss": 0.834, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.1550823623945359, |
|
"grad_norm": 0.5683577656745911, |
|
"learning_rate": 7.719852700666932e-06, |
|
"loss": 0.7345, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.157492969063881, |
|
"grad_norm": 0.5678842663764954, |
|
"learning_rate": 7.70804194110598e-06, |
|
"loss": 0.7701, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.159903575733226, |
|
"grad_norm": 0.6107463836669922, |
|
"learning_rate": 7.696209760113051e-06, |
|
"loss": 0.8271, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.1623141824025713, |
|
"grad_norm": 0.5877980589866638, |
|
"learning_rate": 7.684356251284279e-06, |
|
"loss": 0.7114, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.1647247890719163, |
|
"grad_norm": 0.6016087532043457, |
|
"learning_rate": 7.672481508384512e-06, |
|
"loss": 0.8562, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.1671353957412616, |
|
"grad_norm": 0.643948495388031, |
|
"learning_rate": 7.66058562534656e-06, |
|
"loss": 0.7836, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.1695460024106066, |
|
"grad_norm": 0.5352935194969177, |
|
"learning_rate": 7.648668696270465e-06, |
|
"loss": 0.7657, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.1719566090799518, |
|
"grad_norm": 0.5975997447967529, |
|
"learning_rate": 7.636730815422741e-06, |
|
"loss": 0.7156, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.1743672157492968, |
|
"grad_norm": 0.6982394456863403, |
|
"learning_rate": 7.624772077235645e-06, |
|
"loss": 0.8539, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.176777822418642, |
|
"grad_norm": 0.5386956930160522, |
|
"learning_rate": 7.612792576306416e-06, |
|
"loss": 0.7653, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.1791884290879873, |
|
"grad_norm": 0.669564962387085, |
|
"learning_rate": 7.600792407396535e-06, |
|
"loss": 0.7075, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.1815990357573323, |
|
"grad_norm": 0.7186045050621033, |
|
"learning_rate": 7.588771665430972e-06, |
|
"loss": 0.8395, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.1840096424266773, |
|
"grad_norm": 0.6615277528762817, |
|
"learning_rate": 7.576730445497438e-06, |
|
"loss": 0.814, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.1864202490960225, |
|
"grad_norm": 0.5930035710334778, |
|
"learning_rate": 7.56466884284563e-06, |
|
"loss": 0.7371, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.1888308557653677, |
|
"grad_norm": 0.556384265422821, |
|
"learning_rate": 7.552586952886478e-06, |
|
"loss": 0.7922, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.1912414624347127, |
|
"grad_norm": 0.6335439085960388, |
|
"learning_rate": 7.5404848711913896e-06, |
|
"loss": 0.845, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.1936520691040577, |
|
"grad_norm": 0.5936745405197144, |
|
"learning_rate": 7.5283626934914975e-06, |
|
"loss": 0.7108, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.196062675773403, |
|
"grad_norm": 0.7709783315658569, |
|
"learning_rate": 7.5162205156769e-06, |
|
"loss": 0.9609, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.1984732824427482, |
|
"grad_norm": 0.5363426804542542, |
|
"learning_rate": 7.504058433795901e-06, |
|
"loss": 0.7275, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.2008838891120932, |
|
"grad_norm": 0.5607777237892151, |
|
"learning_rate": 7.491876544054251e-06, |
|
"loss": 0.7118, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.2032944957814382, |
|
"grad_norm": 0.6465369462966919, |
|
"learning_rate": 7.4796749428143854e-06, |
|
"loss": 0.7399, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.2057051024507834, |
|
"grad_norm": 0.6445679068565369, |
|
"learning_rate": 7.4674537265946715e-06, |
|
"loss": 0.7651, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.2081157091201287, |
|
"grad_norm": 0.6330168843269348, |
|
"learning_rate": 7.455212992068626e-06, |
|
"loss": 0.8275, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.2105263157894737, |
|
"grad_norm": 0.5210802555084229, |
|
"learning_rate": 7.442952836064169e-06, |
|
"loss": 0.7289, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.2129369224588187, |
|
"grad_norm": 0.6585263013839722, |
|
"learning_rate": 7.430673355562847e-06, |
|
"loss": 0.793, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.215347529128164, |
|
"grad_norm": 0.6018887162208557, |
|
"learning_rate": 7.418374647699069e-06, |
|
"loss": 0.7905, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.2177581357975091, |
|
"grad_norm": 0.6504072546958923, |
|
"learning_rate": 7.406056809759342e-06, |
|
"loss": 0.8256, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.2201687424668541, |
|
"grad_norm": 0.730323076248169, |
|
"learning_rate": 7.393719939181494e-06, |
|
"loss": 0.824, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.2225793491361994, |
|
"grad_norm": 0.6128969788551331, |
|
"learning_rate": 7.381364133553907e-06, |
|
"loss": 0.7426, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.2249899558055444, |
|
"grad_norm": 0.6733986139297485, |
|
"learning_rate": 7.368989490614744e-06, |
|
"loss": 0.7641, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.2274005624748896, |
|
"grad_norm": 0.6309253573417664, |
|
"learning_rate": 7.356596108251184e-06, |
|
"loss": 0.804, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.2298111691442346, |
|
"grad_norm": 0.6509778499603271, |
|
"learning_rate": 7.3441840844986316e-06, |
|
"loss": 0.7067, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.2322217758135798, |
|
"grad_norm": 0.5834366679191589, |
|
"learning_rate": 7.331753517539954e-06, |
|
"loss": 0.8313, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.2346323824829248, |
|
"grad_norm": 0.5802374482154846, |
|
"learning_rate": 7.319304505704703e-06, |
|
"loss": 0.7689, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.23704298915227, |
|
"grad_norm": 0.5493485331535339, |
|
"learning_rate": 7.306837147468331e-06, |
|
"loss": 0.8492, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.239453595821615, |
|
"grad_norm": 0.5775658488273621, |
|
"learning_rate": 7.294351541451419e-06, |
|
"loss": 0.7663, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.2418642024909603, |
|
"grad_norm": 0.5747280716896057, |
|
"learning_rate": 7.28184778641889e-06, |
|
"loss": 0.7991, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.2442748091603053, |
|
"grad_norm": 0.6496589779853821, |
|
"learning_rate": 7.269325981279236e-06, |
|
"loss": 0.7624, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.2466854158296505, |
|
"grad_norm": 0.6285122632980347, |
|
"learning_rate": 7.256786225083726e-06, |
|
"loss": 0.876, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.2490960224989955, |
|
"grad_norm": 0.506952702999115, |
|
"learning_rate": 7.244228617025633e-06, |
|
"loss": 0.6533, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.2515066291683408, |
|
"grad_norm": 0.5777001976966858, |
|
"learning_rate": 7.231653256439437e-06, |
|
"loss": 0.7933, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.2539172358376858, |
|
"grad_norm": 0.646965503692627, |
|
"learning_rate": 7.219060242800051e-06, |
|
"loss": 0.8577, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.256327842507031, |
|
"grad_norm": 0.6504207253456116, |
|
"learning_rate": 7.2064496757220245e-06, |
|
"loss": 0.8778, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.258738449176376, |
|
"grad_norm": 0.5061886310577393, |
|
"learning_rate": 7.193821654958767e-06, |
|
"loss": 0.7084, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.2611490558457212, |
|
"grad_norm": 0.6259862780570984, |
|
"learning_rate": 7.181176280401745e-06, |
|
"loss": 0.775, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.2635596625150662, |
|
"grad_norm": 0.6118078827857971, |
|
"learning_rate": 7.168513652079701e-06, |
|
"loss": 0.8755, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.2659702691844115, |
|
"grad_norm": 0.54640132188797, |
|
"learning_rate": 7.15583387015786e-06, |
|
"loss": 0.7318, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.2683808758537565, |
|
"grad_norm": 0.5238545536994934, |
|
"learning_rate": 7.1431370349371375e-06, |
|
"loss": 0.7411, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.2707914825231017, |
|
"grad_norm": 0.546366274356842, |
|
"learning_rate": 7.130423246853349e-06, |
|
"loss": 0.8222, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.2732020891924467, |
|
"grad_norm": 0.5696098208427429, |
|
"learning_rate": 7.117692606476405e-06, |
|
"loss": 0.7325, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.275612695861792, |
|
"grad_norm": 0.5879912972450256, |
|
"learning_rate": 7.1049452145095295e-06, |
|
"loss": 0.8107, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.278023302531137, |
|
"grad_norm": 0.6207639575004578, |
|
"learning_rate": 7.092181171788452e-06, |
|
"loss": 0.8618, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.2804339092004822, |
|
"grad_norm": 0.5925825834274292, |
|
"learning_rate": 7.079400579280618e-06, |
|
"loss": 0.7505, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.2828445158698272, |
|
"grad_norm": 0.529080331325531, |
|
"learning_rate": 7.066603538084384e-06, |
|
"loss": 0.7533, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.2852551225391724, |
|
"grad_norm": 0.6177625060081482, |
|
"learning_rate": 7.053790149428223e-06, |
|
"loss": 0.8435, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.2876657292085174, |
|
"grad_norm": 0.5700219869613647, |
|
"learning_rate": 7.040960514669919e-06, |
|
"loss": 0.7714, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.2900763358778626, |
|
"grad_norm": 0.5268744230270386, |
|
"learning_rate": 7.02811473529577e-06, |
|
"loss": 0.7789, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.2924869425472076, |
|
"grad_norm": 0.585462749004364, |
|
"learning_rate": 7.015252912919781e-06, |
|
"loss": 0.7844, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.2948975492165529, |
|
"grad_norm": 0.5214054584503174, |
|
"learning_rate": 7.002375149282863e-06, |
|
"loss": 0.7395, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.2973081558858979, |
|
"grad_norm": 0.5377326607704163, |
|
"learning_rate": 6.989481546252027e-06, |
|
"loss": 0.7834, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.299718762555243, |
|
"grad_norm": 0.6663128137588501, |
|
"learning_rate": 6.976572205819577e-06, |
|
"loss": 0.9486, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.302129369224588, |
|
"grad_norm": 0.4820623993873596, |
|
"learning_rate": 6.9636472301023065e-06, |
|
"loss": 0.7593, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.3045399758939333, |
|
"grad_norm": 0.5082680583000183, |
|
"learning_rate": 6.95070672134069e-06, |
|
"loss": 0.7489, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.3069505825632786, |
|
"grad_norm": 0.5641727447509766, |
|
"learning_rate": 6.937750781898068e-06, |
|
"loss": 0.7084, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.3093611892326236, |
|
"grad_norm": 0.5218797326087952, |
|
"learning_rate": 6.924779514259849e-06, |
|
"loss": 0.8043, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.3117717959019686, |
|
"grad_norm": 0.5321512222290039, |
|
"learning_rate": 6.911793021032687e-06, |
|
"loss": 0.8391, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.3141824025713138, |
|
"grad_norm": 0.6125780344009399, |
|
"learning_rate": 6.898791404943674e-06, |
|
"loss": 0.8612, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.316593009240659, |
|
"grad_norm": 0.5522393584251404, |
|
"learning_rate": 6.885774768839537e-06, |
|
"loss": 0.8248, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.319003615910004, |
|
"grad_norm": 0.4764183759689331, |
|
"learning_rate": 6.872743215685808e-06, |
|
"loss": 0.6839, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.321414222579349, |
|
"grad_norm": 0.5523101091384888, |
|
"learning_rate": 6.859696848566017e-06, |
|
"loss": 0.7667, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.3238248292486943, |
|
"grad_norm": 0.5903946757316589, |
|
"learning_rate": 6.846635770680883e-06, |
|
"loss": 0.7859, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.3262354359180395, |
|
"grad_norm": 0.5406984090805054, |
|
"learning_rate": 6.8335600853474835e-06, |
|
"loss": 0.7954, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.3286460425873845, |
|
"grad_norm": 0.6634337306022644, |
|
"learning_rate": 6.820469895998455e-06, |
|
"loss": 0.8073, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.3310566492567295, |
|
"grad_norm": 0.5275270938873291, |
|
"learning_rate": 6.807365306181157e-06, |
|
"loss": 0.7806, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.3334672559260747, |
|
"grad_norm": 0.5928820967674255, |
|
"learning_rate": 6.794246419556866e-06, |
|
"loss": 0.7392, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.33587786259542, |
|
"grad_norm": 0.518025815486908, |
|
"learning_rate": 6.781113339899947e-06, |
|
"loss": 0.7879, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.338288469264765, |
|
"grad_norm": 0.5962988138198853, |
|
"learning_rate": 6.767966171097038e-06, |
|
"loss": 0.7913, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.34069907593411, |
|
"grad_norm": 0.5495813488960266, |
|
"learning_rate": 6.7548050171462275e-06, |
|
"loss": 0.7647, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.3431096826034552, |
|
"grad_norm": 0.5601335763931274, |
|
"learning_rate": 6.741629982156228e-06, |
|
"loss": 0.7954, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.3455202892728004, |
|
"grad_norm": 0.5570335388183594, |
|
"learning_rate": 6.728441170345559e-06, |
|
"loss": 0.762, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.3479308959421454, |
|
"grad_norm": 0.5548774600028992, |
|
"learning_rate": 6.715238686041714e-06, |
|
"loss": 0.7679, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.3503415026114904, |
|
"grad_norm": 0.6466426253318787, |
|
"learning_rate": 6.702022633680345e-06, |
|
"loss": 0.8448, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.3527521092808357, |
|
"grad_norm": 0.5387895703315735, |
|
"learning_rate": 6.688793117804428e-06, |
|
"loss": 0.7477, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.355162715950181, |
|
"grad_norm": 0.6392855048179626, |
|
"learning_rate": 6.67555024306344e-06, |
|
"loss": 0.7822, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.357573322619526, |
|
"grad_norm": 0.5825173854827881, |
|
"learning_rate": 6.662294114212533e-06, |
|
"loss": 0.7774, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.359983929288871, |
|
"grad_norm": 0.5614640712738037, |
|
"learning_rate": 6.649024836111698e-06, |
|
"loss": 0.8526, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.3623945359582161, |
|
"grad_norm": 0.6232070922851562, |
|
"learning_rate": 6.635742513724948e-06, |
|
"loss": 0.7779, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.3648051426275614, |
|
"grad_norm": 0.6508151888847351, |
|
"learning_rate": 6.6224472521194725e-06, |
|
"loss": 0.7132, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.3672157492969064, |
|
"grad_norm": 0.523857057094574, |
|
"learning_rate": 6.609139156464821e-06, |
|
"loss": 0.7714, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.3696263559662514, |
|
"grad_norm": 0.6634368896484375, |
|
"learning_rate": 6.595818332032058e-06, |
|
"loss": 0.8518, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.3720369626355966, |
|
"grad_norm": 0.6455778479576111, |
|
"learning_rate": 6.582484884192941e-06, |
|
"loss": 0.7877, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.3744475693049418, |
|
"grad_norm": 0.5190636515617371, |
|
"learning_rate": 6.56913891841908e-06, |
|
"loss": 0.7254, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.3768581759742868, |
|
"grad_norm": 0.6370989084243774, |
|
"learning_rate": 6.555780540281109e-06, |
|
"loss": 0.761, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.379268782643632, |
|
"grad_norm": 0.71718430519104, |
|
"learning_rate": 6.5424098554478425e-06, |
|
"loss": 0.7352, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.381679389312977, |
|
"grad_norm": 0.6870063543319702, |
|
"learning_rate": 6.529026969685446e-06, |
|
"loss": 0.8534, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.3840899959823223, |
|
"grad_norm": 0.5976680517196655, |
|
"learning_rate": 6.515631988856603e-06, |
|
"loss": 0.78, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.3865006026516673, |
|
"grad_norm": 0.6401963829994202, |
|
"learning_rate": 6.502225018919668e-06, |
|
"loss": 0.7515, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.3889112093210125, |
|
"grad_norm": 0.5665940046310425, |
|
"learning_rate": 6.488806165927832e-06, |
|
"loss": 0.8214, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.3913218159903575, |
|
"grad_norm": 0.49113357067108154, |
|
"learning_rate": 6.47537553602829e-06, |
|
"loss": 0.7026, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.3937324226597028, |
|
"grad_norm": 0.6468653082847595, |
|
"learning_rate": 6.461933235461388e-06, |
|
"loss": 0.8477, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.3961430293290478, |
|
"grad_norm": 0.5712079405784607, |
|
"learning_rate": 6.448479370559797e-06, |
|
"loss": 0.756, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.398553635998393, |
|
"grad_norm": 0.4885547459125519, |
|
"learning_rate": 6.435014047747665e-06, |
|
"loss": 0.7306, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.400964242667738, |
|
"grad_norm": 0.5576856136322021, |
|
"learning_rate": 6.421537373539772e-06, |
|
"loss": 0.7304, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.4033748493370832, |
|
"grad_norm": 0.6850072145462036, |
|
"learning_rate": 6.408049454540692e-06, |
|
"loss": 0.7706, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.4057854560064282, |
|
"grad_norm": 0.6012528538703918, |
|
"learning_rate": 6.394550397443953e-06, |
|
"loss": 0.8947, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.4081960626757735, |
|
"grad_norm": 0.7243214845657349, |
|
"learning_rate": 6.381040309031183e-06, |
|
"loss": 0.7922, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.4106066693451185, |
|
"grad_norm": 0.6892761588096619, |
|
"learning_rate": 6.367519296171272e-06, |
|
"loss": 0.7648, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.4130172760144637, |
|
"grad_norm": 0.6180418729782104, |
|
"learning_rate": 6.353987465819529e-06, |
|
"loss": 0.7559, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.4154278826838087, |
|
"grad_norm": 0.7062158584594727, |
|
"learning_rate": 6.34044492501683e-06, |
|
"loss": 0.7318, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.417838489353154, |
|
"grad_norm": 0.7485331296920776, |
|
"learning_rate": 6.326891780888775e-06, |
|
"loss": 0.7661, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.420249096022499, |
|
"grad_norm": 0.5578104853630066, |
|
"learning_rate": 6.313328140644839e-06, |
|
"loss": 0.8575, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.4226597026918442, |
|
"grad_norm": 0.5605173110961914, |
|
"learning_rate": 6.299754111577524e-06, |
|
"loss": 0.7581, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.4250703093611892, |
|
"grad_norm": 0.6252680420875549, |
|
"learning_rate": 6.286169801061512e-06, |
|
"loss": 0.7497, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.4274809160305344, |
|
"grad_norm": 0.6541606187820435, |
|
"learning_rate": 6.2725753165528115e-06, |
|
"loss": 0.7644, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.4298915226998794, |
|
"grad_norm": 0.5784509778022766, |
|
"learning_rate": 6.2589707655879165e-06, |
|
"loss": 0.7891, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.4323021293692246, |
|
"grad_norm": 0.566062867641449, |
|
"learning_rate": 6.245356255782942e-06, |
|
"loss": 0.767, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.4347127360385696, |
|
"grad_norm": 0.5759670734405518, |
|
"learning_rate": 6.2317318948327865e-06, |
|
"loss": 0.7875, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.4371233427079149, |
|
"grad_norm": 0.6124853491783142, |
|
"learning_rate": 6.21809779051027e-06, |
|
"loss": 0.6961, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.4395339493772599, |
|
"grad_norm": 0.6298825740814209, |
|
"learning_rate": 6.20445405066529e-06, |
|
"loss": 0.8353, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.441944556046605, |
|
"grad_norm": 0.5451638698577881, |
|
"learning_rate": 6.190800783223959e-06, |
|
"loss": 0.6963, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.44435516271595, |
|
"grad_norm": 0.7436325550079346, |
|
"learning_rate": 6.1771380961877565e-06, |
|
"loss": 0.8432, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.4467657693852953, |
|
"grad_norm": 0.5576462745666504, |
|
"learning_rate": 6.163466097632677e-06, |
|
"loss": 0.819, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.4491763760546403, |
|
"grad_norm": 0.6428177952766418, |
|
"learning_rate": 6.14978489570837e-06, |
|
"loss": 0.7632, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.4515869827239856, |
|
"grad_norm": 0.5757659077644348, |
|
"learning_rate": 6.136094598637288e-06, |
|
"loss": 0.7093, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.4539975893933308, |
|
"grad_norm": 0.6133916974067688, |
|
"learning_rate": 6.122395314713825e-06, |
|
"loss": 0.8834, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.4564081960626758, |
|
"grad_norm": 0.5794780254364014, |
|
"learning_rate": 6.108687152303468e-06, |
|
"loss": 0.7203, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.4588188027320208, |
|
"grad_norm": 0.5581604838371277, |
|
"learning_rate": 6.094970219841929e-06, |
|
"loss": 0.8264, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.461229409401366, |
|
"grad_norm": 0.6359495520591736, |
|
"learning_rate": 6.081244625834305e-06, |
|
"loss": 0.85, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.4636400160707113, |
|
"grad_norm": 0.5560900568962097, |
|
"learning_rate": 6.067510478854198e-06, |
|
"loss": 0.7037, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.4660506227400563, |
|
"grad_norm": 0.5877501368522644, |
|
"learning_rate": 6.0537678875428715e-06, |
|
"loss": 0.8572, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.4684612294094013, |
|
"grad_norm": 0.5356528162956238, |
|
"learning_rate": 6.040016960608385e-06, |
|
"loss": 0.7257, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.4708718360787465, |
|
"grad_norm": 0.5595106482505798, |
|
"learning_rate": 6.026257806824735e-06, |
|
"loss": 0.7412, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.4732824427480917, |
|
"grad_norm": 0.5976831912994385, |
|
"learning_rate": 6.012490535030997e-06, |
|
"loss": 0.7414, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.4756930494174367, |
|
"grad_norm": 0.6237283945083618, |
|
"learning_rate": 5.998715254130459e-06, |
|
"loss": 0.8383, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.4781036560867817, |
|
"grad_norm": 0.465260773897171, |
|
"learning_rate": 5.984932073089767e-06, |
|
"loss": 0.6901, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.480514262756127, |
|
"grad_norm": 0.546064019203186, |
|
"learning_rate": 5.971141100938054e-06, |
|
"loss": 0.8084, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.4829248694254722, |
|
"grad_norm": 0.5877063870429993, |
|
"learning_rate": 5.957342446766091e-06, |
|
"loss": 0.6977, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.4853354760948172, |
|
"grad_norm": 0.6162629723548889, |
|
"learning_rate": 5.94353621972541e-06, |
|
"loss": 0.858, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.4877460827641622, |
|
"grad_norm": 0.5107839703559875, |
|
"learning_rate": 5.92972252902745e-06, |
|
"loss": 0.768, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.4901566894335074, |
|
"grad_norm": 0.5261808037757874, |
|
"learning_rate": 5.915901483942687e-06, |
|
"loss": 0.7708, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.4925672961028527, |
|
"grad_norm": 0.687761127948761, |
|
"learning_rate": 5.902073193799772e-06, |
|
"loss": 0.7811, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.4949779027721977, |
|
"grad_norm": 0.5662958025932312, |
|
"learning_rate": 5.8882377679846725e-06, |
|
"loss": 0.8242, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.4973885094415427, |
|
"grad_norm": 0.6431335806846619, |
|
"learning_rate": 5.874395315939794e-06, |
|
"loss": 0.8113, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.499799116110888, |
|
"grad_norm": 0.5993348956108093, |
|
"learning_rate": 5.860545947163127e-06, |
|
"loss": 0.8181, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.5022097227802331, |
|
"grad_norm": 0.5593604445457458, |
|
"learning_rate": 5.846689771207372e-06, |
|
"loss": 0.7528, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.5046203294495781, |
|
"grad_norm": 0.6782893538475037, |
|
"learning_rate": 5.832826897679077e-06, |
|
"loss": 0.769, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.5070309361189231, |
|
"grad_norm": 0.6962941288948059, |
|
"learning_rate": 5.818957436237772e-06, |
|
"loss": 0.8636, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.5094415427882684, |
|
"grad_norm": 0.5692513585090637, |
|
"learning_rate": 5.805081496595098e-06, |
|
"loss": 0.7155, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.5118521494576136, |
|
"grad_norm": 0.666286051273346, |
|
"learning_rate": 5.791199188513939e-06, |
|
"loss": 0.8029, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.5142627561269586, |
|
"grad_norm": 0.6114779710769653, |
|
"learning_rate": 5.777310621807556e-06, |
|
"loss": 0.7359, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.5166733627963036, |
|
"grad_norm": 0.6151116490364075, |
|
"learning_rate": 5.7634159063387205e-06, |
|
"loss": 0.7971, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.5190839694656488, |
|
"grad_norm": 0.6167870163917542, |
|
"learning_rate": 5.749515152018839e-06, |
|
"loss": 0.7959, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.521494576134994, |
|
"grad_norm": 0.5813983678817749, |
|
"learning_rate": 5.73560846880709e-06, |
|
"loss": 0.817, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.523905182804339, |
|
"grad_norm": 0.5264800786972046, |
|
"learning_rate": 5.721695966709548e-06, |
|
"loss": 0.6862, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.526315789473684, |
|
"grad_norm": 0.5654107332229614, |
|
"learning_rate": 5.707777755778319e-06, |
|
"loss": 0.8241, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.5287263961430293, |
|
"grad_norm": 0.4881124496459961, |
|
"learning_rate": 5.693853946110667e-06, |
|
"loss": 0.7701, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.5311370028123745, |
|
"grad_norm": 0.6563966274261475, |
|
"learning_rate": 5.679924647848144e-06, |
|
"loss": 0.7822, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.5335476094817195, |
|
"grad_norm": 0.5599648356437683, |
|
"learning_rate": 5.6659899711757185e-06, |
|
"loss": 0.7897, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.5359582161510645, |
|
"grad_norm": 0.5359692573547363, |
|
"learning_rate": 5.652050026320903e-06, |
|
"loss": 0.8004, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.5383688228204098, |
|
"grad_norm": 0.537142276763916, |
|
"learning_rate": 5.638104923552884e-06, |
|
"loss": 0.7366, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.540779429489755, |
|
"grad_norm": 0.5490397810935974, |
|
"learning_rate": 5.624154773181647e-06, |
|
"loss": 0.7735, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.5431900361591, |
|
"grad_norm": 0.5084328651428223, |
|
"learning_rate": 5.610199685557109e-06, |
|
"loss": 0.7546, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.545600642828445, |
|
"grad_norm": 0.4906451106071472, |
|
"learning_rate": 5.596239771068238e-06, |
|
"loss": 0.789, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.5480112494977902, |
|
"grad_norm": 0.5476257801055908, |
|
"learning_rate": 5.5822751401421846e-06, |
|
"loss": 0.8137, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.5504218561671355, |
|
"grad_norm": 0.5136173963546753, |
|
"learning_rate": 5.568305903243413e-06, |
|
"loss": 0.7683, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.5528324628364805, |
|
"grad_norm": 0.5292182564735413, |
|
"learning_rate": 5.554332170872816e-06, |
|
"loss": 0.7771, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.5552430695058255, |
|
"grad_norm": 0.47428184747695923, |
|
"learning_rate": 5.540354053566848e-06, |
|
"loss": 0.6704, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.5576536761751707, |
|
"grad_norm": 0.5634832978248596, |
|
"learning_rate": 5.526371661896653e-06, |
|
"loss": 0.8604, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.560064282844516, |
|
"grad_norm": 0.5374214053153992, |
|
"learning_rate": 5.512385106467179e-06, |
|
"loss": 0.874, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.5624748895138612, |
|
"grad_norm": 0.49724236130714417, |
|
"learning_rate": 5.498394497916324e-06, |
|
"loss": 0.681, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.5648854961832062, |
|
"grad_norm": 0.5896291732788086, |
|
"learning_rate": 5.484399946914033e-06, |
|
"loss": 0.7843, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.5672961028525512, |
|
"grad_norm": 0.5508412718772888, |
|
"learning_rate": 5.470401564161445e-06, |
|
"loss": 0.7598, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.5697067095218964, |
|
"grad_norm": 0.5462619662284851, |
|
"learning_rate": 5.456399460390007e-06, |
|
"loss": 0.8246, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.5721173161912416, |
|
"grad_norm": 0.5629780888557434, |
|
"learning_rate": 5.4423937463606e-06, |
|
"loss": 0.7746, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.5745279228605866, |
|
"grad_norm": 0.5267663598060608, |
|
"learning_rate": 5.428384532862667e-06, |
|
"loss": 0.7844, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.5769385295299316, |
|
"grad_norm": 0.5778921246528625, |
|
"learning_rate": 5.4143719307133295e-06, |
|
"loss": 0.8168, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.5793491361992769, |
|
"grad_norm": 0.4362998902797699, |
|
"learning_rate": 5.400356050756511e-06, |
|
"loss": 0.6602, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.581759742868622, |
|
"grad_norm": 0.553729236125946, |
|
"learning_rate": 5.3863370038620696e-06, |
|
"loss": 0.8527, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.584170349537967, |
|
"grad_norm": 0.5067568421363831, |
|
"learning_rate": 5.372314900924913e-06, |
|
"loss": 0.7474, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.586580956207312, |
|
"grad_norm": 0.6317214965820312, |
|
"learning_rate": 5.358289852864123e-06, |
|
"loss": 0.829, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.5889915628766573, |
|
"grad_norm": 0.6612610816955566, |
|
"learning_rate": 5.344261970622075e-06, |
|
"loss": 0.7385, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.5914021695460026, |
|
"grad_norm": 0.5204635858535767, |
|
"learning_rate": 5.330231365163568e-06, |
|
"loss": 0.8214, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.5938127762153476, |
|
"grad_norm": 0.6329158544540405, |
|
"learning_rate": 5.31619814747494e-06, |
|
"loss": 0.8337, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.5962233828846926, |
|
"grad_norm": 0.700230598449707, |
|
"learning_rate": 5.302162428563196e-06, |
|
"loss": 0.7858, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.5986339895540378, |
|
"grad_norm": 0.5170446634292603, |
|
"learning_rate": 5.288124319455121e-06, |
|
"loss": 0.7758, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.601044596223383, |
|
"grad_norm": 0.46353042125701904, |
|
"learning_rate": 5.274083931196412e-06, |
|
"loss": 0.6741, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.603455202892728, |
|
"grad_norm": 0.7215187549591064, |
|
"learning_rate": 5.26004137485079e-06, |
|
"loss": 0.902, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.605865809562073, |
|
"grad_norm": 0.5355817675590515, |
|
"learning_rate": 5.24599676149913e-06, |
|
"loss": 0.6776, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.6082764162314183, |
|
"grad_norm": 0.5104895830154419, |
|
"learning_rate": 5.231950202238579e-06, |
|
"loss": 0.8331, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.6106870229007635, |
|
"grad_norm": 0.5898986458778381, |
|
"learning_rate": 5.217901808181675e-06, |
|
"loss": 0.7964, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.6130976295701085, |
|
"grad_norm": 0.6109344363212585, |
|
"learning_rate": 5.203851690455469e-06, |
|
"loss": 0.8243, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.6155082362394535, |
|
"grad_norm": 0.5583123564720154, |
|
"learning_rate": 5.1897999602006466e-06, |
|
"loss": 0.7745, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.6179188429087987, |
|
"grad_norm": 0.5470631122589111, |
|
"learning_rate": 5.175746728570653e-06, |
|
"loss": 0.8059, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.620329449578144, |
|
"grad_norm": 0.46003690361976624, |
|
"learning_rate": 5.161692106730806e-06, |
|
"loss": 0.7503, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.622740056247489, |
|
"grad_norm": 0.6411407589912415, |
|
"learning_rate": 5.1476362058574205e-06, |
|
"loss": 0.8173, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.625150662916834, |
|
"grad_norm": 0.6161395311355591, |
|
"learning_rate": 5.133579137136929e-06, |
|
"loss": 0.7971, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.6275612695861792, |
|
"grad_norm": 0.6249494552612305, |
|
"learning_rate": 5.119521011765004e-06, |
|
"loss": 0.7117, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.6299718762555244, |
|
"grad_norm": 0.47039860486984253, |
|
"learning_rate": 5.105461940945675e-06, |
|
"loss": 0.7917, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.6323824829248694, |
|
"grad_norm": 0.6689234972000122, |
|
"learning_rate": 5.09140203589045e-06, |
|
"loss": 0.862, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.6347930895942144, |
|
"grad_norm": 0.6127128601074219, |
|
"learning_rate": 5.077341407817439e-06, |
|
"loss": 0.6955, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.6372036962635597, |
|
"grad_norm": 0.5461022257804871, |
|
"learning_rate": 5.063280167950465e-06, |
|
"loss": 0.8663, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.6396143029329049, |
|
"grad_norm": 0.43847835063934326, |
|
"learning_rate": 5.049218427518195e-06, |
|
"loss": 0.7197, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.64202490960225, |
|
"grad_norm": 0.45636385679244995, |
|
"learning_rate": 5.035156297753258e-06, |
|
"loss": 0.7215, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.644435516271595, |
|
"grad_norm": 0.6869786381721497, |
|
"learning_rate": 5.021093889891357e-06, |
|
"loss": 0.8938, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.6468461229409401, |
|
"grad_norm": 0.5361312031745911, |
|
"learning_rate": 5.007031315170397e-06, |
|
"loss": 0.7364, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.6492567296102854, |
|
"grad_norm": 0.4618918001651764, |
|
"learning_rate": 4.992968684829605e-06, |
|
"loss": 0.7541, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.6516673362796304, |
|
"grad_norm": 0.5298300981521606, |
|
"learning_rate": 4.978906110108644e-06, |
|
"loss": 0.7157, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.6540779429489754, |
|
"grad_norm": 0.5889416337013245, |
|
"learning_rate": 4.964843702246744e-06, |
|
"loss": 0.8671, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.6564885496183206, |
|
"grad_norm": 0.5087608098983765, |
|
"learning_rate": 4.950781572481806e-06, |
|
"loss": 0.7043, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.6588991562876658, |
|
"grad_norm": 0.6054006218910217, |
|
"learning_rate": 4.936719832049537e-06, |
|
"loss": 0.8666, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.6613097629570108, |
|
"grad_norm": 0.49635258316993713, |
|
"learning_rate": 4.922658592182562e-06, |
|
"loss": 0.7715, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.6637203696263558, |
|
"grad_norm": 0.4960280656814575, |
|
"learning_rate": 4.908597964109551e-06, |
|
"loss": 0.8109, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.666130976295701, |
|
"grad_norm": 0.4894808828830719, |
|
"learning_rate": 4.894538059054325e-06, |
|
"loss": 0.7787, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.6685415829650463, |
|
"grad_norm": 0.5125415921211243, |
|
"learning_rate": 4.880478988234998e-06, |
|
"loss": 0.7929, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.6709521896343913, |
|
"grad_norm": 0.5139522552490234, |
|
"learning_rate": 4.8664208628630726e-06, |
|
"loss": 0.7566, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.6733627963037363, |
|
"grad_norm": 0.4818650186061859, |
|
"learning_rate": 4.852363794142582e-06, |
|
"loss": 0.7724, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.6757734029730815, |
|
"grad_norm": 0.5403794646263123, |
|
"learning_rate": 4.838307893269194e-06, |
|
"loss": 0.8026, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.6781840096424268, |
|
"grad_norm": 0.49481678009033203, |
|
"learning_rate": 4.8242532714293484e-06, |
|
"loss": 0.8799, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.6805946163117718, |
|
"grad_norm": 0.457158625125885, |
|
"learning_rate": 4.810200039799353e-06, |
|
"loss": 0.7346, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.6830052229811168, |
|
"grad_norm": 0.5239042639732361, |
|
"learning_rate": 4.796148309544532e-06, |
|
"loss": 0.7095, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.685415829650462, |
|
"grad_norm": 0.5864506959915161, |
|
"learning_rate": 4.7820981918183255e-06, |
|
"loss": 0.8662, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.6878264363198072, |
|
"grad_norm": 0.4723222851753235, |
|
"learning_rate": 4.768049797761422e-06, |
|
"loss": 0.7177, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.6902370429891522, |
|
"grad_norm": 0.5329029560089111, |
|
"learning_rate": 4.75400323850087e-06, |
|
"loss": 0.7767, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.6926476496584972, |
|
"grad_norm": 0.48759615421295166, |
|
"learning_rate": 4.739958625149211e-06, |
|
"loss": 0.7491, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.6950582563278425, |
|
"grad_norm": 0.5103580355644226, |
|
"learning_rate": 4.725916068803589e-06, |
|
"loss": 0.792, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.6974688629971877, |
|
"grad_norm": 0.5285454392433167, |
|
"learning_rate": 4.71187568054488e-06, |
|
"loss": 0.7926, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.6998794696665327, |
|
"grad_norm": 0.5422332286834717, |
|
"learning_rate": 4.697837571436804e-06, |
|
"loss": 0.8355, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.7022900763358777, |
|
"grad_norm": 0.46462592482566833, |
|
"learning_rate": 4.683801852525061e-06, |
|
"loss": 0.7552, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.704700683005223, |
|
"grad_norm": 0.5071070790290833, |
|
"learning_rate": 4.669768634836433e-06, |
|
"loss": 0.7966, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.7071112896745682, |
|
"grad_norm": 0.4237877130508423, |
|
"learning_rate": 4.6557380293779266e-06, |
|
"loss": 0.73, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.7095218963439134, |
|
"grad_norm": 0.49440935254096985, |
|
"learning_rate": 4.641710147135878e-06, |
|
"loss": 0.8559, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.7119325030132584, |
|
"grad_norm": 0.5085378885269165, |
|
"learning_rate": 4.627685099075089e-06, |
|
"loss": 0.7579, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.7143431096826034, |
|
"grad_norm": 0.478240430355072, |
|
"learning_rate": 4.613662996137931e-06, |
|
"loss": 0.7658, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.7167537163519486, |
|
"grad_norm": 0.436726838350296, |
|
"learning_rate": 4.59964394924349e-06, |
|
"loss": 0.7774, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.7191643230212938, |
|
"grad_norm": 0.4424239695072174, |
|
"learning_rate": 4.585628069286671e-06, |
|
"loss": 0.7686, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.7215749296906389, |
|
"grad_norm": 0.593245267868042, |
|
"learning_rate": 4.5716154671373336e-06, |
|
"loss": 0.8474, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.7239855363599839, |
|
"grad_norm": 0.49589934945106506, |
|
"learning_rate": 4.5576062536394e-06, |
|
"loss": 0.8633, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.726396143029329, |
|
"grad_norm": 0.4527803063392639, |
|
"learning_rate": 4.5436005396099945e-06, |
|
"loss": 0.743, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.7288067496986743, |
|
"grad_norm": 0.5478896498680115, |
|
"learning_rate": 4.529598435838556e-06, |
|
"loss": 0.778, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.7312173563680193, |
|
"grad_norm": 0.503695011138916, |
|
"learning_rate": 4.515600053085969e-06, |
|
"loss": 0.8975, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.7336279630373643, |
|
"grad_norm": 0.48544883728027344, |
|
"learning_rate": 4.501605502083677e-06, |
|
"loss": 0.7777, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.7360385697067096, |
|
"grad_norm": 0.4698773920536041, |
|
"learning_rate": 4.487614893532822e-06, |
|
"loss": 0.7268, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.7384491763760548, |
|
"grad_norm": 0.5563497543334961, |
|
"learning_rate": 4.47362833810335e-06, |
|
"loss": 0.8055, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.7408597830453998, |
|
"grad_norm": 0.49812015891075134, |
|
"learning_rate": 4.459645946433153e-06, |
|
"loss": 0.7705, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.7432703897147448, |
|
"grad_norm": 0.47763168811798096, |
|
"learning_rate": 4.445667829127185e-06, |
|
"loss": 0.7671, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.74568099638409, |
|
"grad_norm": 0.5280501246452332, |
|
"learning_rate": 4.4316940967565885e-06, |
|
"loss": 0.7628, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.7480916030534353, |
|
"grad_norm": 0.515335202217102, |
|
"learning_rate": 4.417724859857815e-06, |
|
"loss": 0.7631, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.7505022097227803, |
|
"grad_norm": 0.5291576981544495, |
|
"learning_rate": 4.403760228931764e-06, |
|
"loss": 0.8272, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.7529128163921253, |
|
"grad_norm": 0.485479474067688, |
|
"learning_rate": 4.389800314442893e-06, |
|
"loss": 0.7546, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.7553234230614705, |
|
"grad_norm": 0.46518808603286743, |
|
"learning_rate": 4.375845226818354e-06, |
|
"loss": 0.7298, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.7577340297308157, |
|
"grad_norm": 0.5309136509895325, |
|
"learning_rate": 4.361895076447118e-06, |
|
"loss": 0.847, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.7601446364001607, |
|
"grad_norm": 0.5040311217308044, |
|
"learning_rate": 4.347949973679098e-06, |
|
"loss": 0.7497, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.7625552430695057, |
|
"grad_norm": 0.5706872940063477, |
|
"learning_rate": 4.334010028824284e-06, |
|
"loss": 0.8181, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.764965849738851, |
|
"grad_norm": 0.5000127553939819, |
|
"learning_rate": 4.320075352151858e-06, |
|
"loss": 0.7261, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.7673764564081962, |
|
"grad_norm": 0.5730839371681213, |
|
"learning_rate": 4.306146053889336e-06, |
|
"loss": 0.8588, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.7697870630775412, |
|
"grad_norm": 0.5194482207298279, |
|
"learning_rate": 4.292222244221683e-06, |
|
"loss": 0.7913, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.7721976697468862, |
|
"grad_norm": 0.49545660614967346, |
|
"learning_rate": 4.278304033290454e-06, |
|
"loss": 0.7938, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.7746082764162314, |
|
"grad_norm": 0.5339921116828918, |
|
"learning_rate": 4.264391531192911e-06, |
|
"loss": 0.8523, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.7770188830855767, |
|
"grad_norm": 0.5100706219673157, |
|
"learning_rate": 4.250484847981163e-06, |
|
"loss": 0.6354, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.7794294897549217, |
|
"grad_norm": 0.5289222002029419, |
|
"learning_rate": 4.23658409366128e-06, |
|
"loss": 0.8711, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.7818400964242667, |
|
"grad_norm": 0.5450829863548279, |
|
"learning_rate": 4.2226893781924455e-06, |
|
"loss": 0.7097, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.784250703093612, |
|
"grad_norm": 0.6055769920349121, |
|
"learning_rate": 4.208800811486063e-06, |
|
"loss": 0.7659, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.7866613097629571, |
|
"grad_norm": 0.5302320122718811, |
|
"learning_rate": 4.194918503404905e-06, |
|
"loss": 0.782, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.7890719164323021, |
|
"grad_norm": 0.515344500541687, |
|
"learning_rate": 4.1810425637622295e-06, |
|
"loss": 0.7654, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.7914825231016471, |
|
"grad_norm": 0.513762354850769, |
|
"learning_rate": 4.167173102320925e-06, |
|
"loss": 0.787, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.7938931297709924, |
|
"grad_norm": 0.5050537586212158, |
|
"learning_rate": 4.15331022879263e-06, |
|
"loss": 0.7699, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.7963037364403376, |
|
"grad_norm": 0.5424744486808777, |
|
"learning_rate": 4.139454052836875e-06, |
|
"loss": 0.7094, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.7987143431096826, |
|
"grad_norm": 0.5176721811294556, |
|
"learning_rate": 4.125604684060207e-06, |
|
"loss": 0.7739, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.8011249497790276, |
|
"grad_norm": 0.5543960928916931, |
|
"learning_rate": 4.111762232015331e-06, |
|
"loss": 0.8358, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.8035355564483728, |
|
"grad_norm": 0.5798778533935547, |
|
"learning_rate": 4.0979268062002295e-06, |
|
"loss": 0.8436, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.805946163117718, |
|
"grad_norm": 0.47962695360183716, |
|
"learning_rate": 4.084098516057317e-06, |
|
"loss": 0.6803, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.808356769787063, |
|
"grad_norm": 0.6475107073783875, |
|
"learning_rate": 4.070277470972552e-06, |
|
"loss": 0.7577, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.810767376456408, |
|
"grad_norm": 0.5260526537895203, |
|
"learning_rate": 4.056463780274592e-06, |
|
"loss": 0.7948, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.8131779831257533, |
|
"grad_norm": 0.5049294829368591, |
|
"learning_rate": 4.042657553233911e-06, |
|
"loss": 0.8353, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.8155885897950985, |
|
"grad_norm": 0.5725390315055847, |
|
"learning_rate": 4.0288588990619475e-06, |
|
"loss": 0.7741, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.8179991964644435, |
|
"grad_norm": 0.4814889132976532, |
|
"learning_rate": 4.015067926910236e-06, |
|
"loss": 0.7457, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.8204098031337885, |
|
"grad_norm": 0.5058619379997253, |
|
"learning_rate": 4.001284745869544e-06, |
|
"loss": 0.7614, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.8228204098031338, |
|
"grad_norm": 0.5509936809539795, |
|
"learning_rate": 3.987509464969005e-06, |
|
"loss": 0.7849, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.825231016472479, |
|
"grad_norm": 0.6180452108383179, |
|
"learning_rate": 3.973742193175267e-06, |
|
"loss": 0.8434, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.827641623141824, |
|
"grad_norm": 0.45592281222343445, |
|
"learning_rate": 3.959983039391617e-06, |
|
"loss": 0.7444, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.830052229811169, |
|
"grad_norm": 0.5047600865364075, |
|
"learning_rate": 3.946232112457131e-06, |
|
"loss": 0.8394, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.8324628364805142, |
|
"grad_norm": 0.4943610429763794, |
|
"learning_rate": 3.932489521145804e-06, |
|
"loss": 0.7349, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.8348734431498595, |
|
"grad_norm": 0.48302721977233887, |
|
"learning_rate": 3.918755374165698e-06, |
|
"loss": 0.7639, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.8372840498192045, |
|
"grad_norm": 0.5173477530479431, |
|
"learning_rate": 3.905029780158072e-06, |
|
"loss": 0.7863, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.8396946564885495, |
|
"grad_norm": 0.4679282009601593, |
|
"learning_rate": 3.8913128476965356e-06, |
|
"loss": 0.7627, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.8421052631578947, |
|
"grad_norm": 0.5126636028289795, |
|
"learning_rate": 3.877604685286177e-06, |
|
"loss": 0.7657, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.84451586982724, |
|
"grad_norm": 0.43907248973846436, |
|
"learning_rate": 3.863905401362714e-06, |
|
"loss": 0.6888, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.846926476496585, |
|
"grad_norm": 0.5296300053596497, |
|
"learning_rate": 3.850215104291631e-06, |
|
"loss": 0.8201, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.84933708316593, |
|
"grad_norm": 0.6165450811386108, |
|
"learning_rate": 3.836533902367324e-06, |
|
"loss": 0.8752, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.8517476898352752, |
|
"grad_norm": 0.4673651158809662, |
|
"learning_rate": 3.822861903812244e-06, |
|
"loss": 0.652, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.8541582965046204, |
|
"grad_norm": 0.5006290674209595, |
|
"learning_rate": 3.809199216776044e-06, |
|
"loss": 0.7965, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.8565689031739656, |
|
"grad_norm": 0.6010116338729858, |
|
"learning_rate": 3.7955459493347115e-06, |
|
"loss": 0.8454, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.8589795098433106, |
|
"grad_norm": 0.494977742433548, |
|
"learning_rate": 3.781902209489731e-06, |
|
"loss": 0.7741, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.8613901165126556, |
|
"grad_norm": 0.49059155583381653, |
|
"learning_rate": 3.7682681051672143e-06, |
|
"loss": 0.7448, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.8638007231820009, |
|
"grad_norm": 0.5115146636962891, |
|
"learning_rate": 3.7546437442170593e-06, |
|
"loss": 0.8069, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.866211329851346, |
|
"grad_norm": 0.5271327495574951, |
|
"learning_rate": 3.7410292344120847e-06, |
|
"loss": 0.7874, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.868621936520691, |
|
"grad_norm": 0.44098061323165894, |
|
"learning_rate": 3.7274246834471893e-06, |
|
"loss": 0.6913, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.871032543190036, |
|
"grad_norm": 0.5418987274169922, |
|
"learning_rate": 3.7138301989384896e-06, |
|
"loss": 0.7498, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.8734431498593813, |
|
"grad_norm": 0.5940513610839844, |
|
"learning_rate": 3.7002458884224778e-06, |
|
"loss": 0.9077, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.8758537565287265, |
|
"grad_norm": 0.44084081053733826, |
|
"learning_rate": 3.686671859355162e-06, |
|
"loss": 0.6846, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.8782643631980716, |
|
"grad_norm": 0.5337911248207092, |
|
"learning_rate": 3.6731082191112267e-06, |
|
"loss": 0.7984, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.8806749698674166, |
|
"grad_norm": 0.5034635663032532, |
|
"learning_rate": 3.659555074983171e-06, |
|
"loss": 0.742, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.8830855765367618, |
|
"grad_norm": 0.550623893737793, |
|
"learning_rate": 3.646012534180472e-06, |
|
"loss": 0.7844, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.885496183206107, |
|
"grad_norm": 0.5091609954833984, |
|
"learning_rate": 3.632480703828728e-06, |
|
"loss": 0.7723, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.887906789875452, |
|
"grad_norm": 0.48753273487091064, |
|
"learning_rate": 3.6189596909688194e-06, |
|
"loss": 0.8099, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.890317396544797, |
|
"grad_norm": 0.5054701566696167, |
|
"learning_rate": 3.6054496025560475e-06, |
|
"loss": 0.774, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.8927280032141423, |
|
"grad_norm": 0.464444100856781, |
|
"learning_rate": 3.5919505454593084e-06, |
|
"loss": 0.7521, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.8951386098834875, |
|
"grad_norm": 0.5345428586006165, |
|
"learning_rate": 3.578462626460229e-06, |
|
"loss": 0.7574, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.8975492165528325, |
|
"grad_norm": 0.5477241277694702, |
|
"learning_rate": 3.5649859522523362e-06, |
|
"loss": 0.8778, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.8999598232221775, |
|
"grad_norm": 0.4662015438079834, |
|
"learning_rate": 3.5515206294402026e-06, |
|
"loss": 0.7499, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.9023704298915227, |
|
"grad_norm": 0.466071218252182, |
|
"learning_rate": 3.538066764538614e-06, |
|
"loss": 0.794, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.904781036560868, |
|
"grad_norm": 0.4782322347164154, |
|
"learning_rate": 3.524624463971712e-06, |
|
"loss": 0.7601, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.907191643230213, |
|
"grad_norm": 0.4884680211544037, |
|
"learning_rate": 3.5111938340721687e-06, |
|
"loss": 0.8311, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.909602249899558, |
|
"grad_norm": 0.4873200058937073, |
|
"learning_rate": 3.4977749810803326e-06, |
|
"loss": 0.7592, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.9120128565689032, |
|
"grad_norm": 0.5784322023391724, |
|
"learning_rate": 3.484368011143398e-06, |
|
"loss": 0.8872, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.9144234632382484, |
|
"grad_norm": 0.4508724808692932, |
|
"learning_rate": 3.470973030314554e-06, |
|
"loss": 0.7274, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.9168340699075934, |
|
"grad_norm": 0.4931320548057556, |
|
"learning_rate": 3.457590144552159e-06, |
|
"loss": 0.7237, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.9192446765769384, |
|
"grad_norm": 0.5083304047584534, |
|
"learning_rate": 3.4442194597188916e-06, |
|
"loss": 0.7589, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.9216552832462837, |
|
"grad_norm": 0.569831907749176, |
|
"learning_rate": 3.430861081580921e-06, |
|
"loss": 0.817, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.9240658899156289, |
|
"grad_norm": 0.5158292055130005, |
|
"learning_rate": 3.4175151158070595e-06, |
|
"loss": 0.8407, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.9264764965849739, |
|
"grad_norm": 0.45425304770469666, |
|
"learning_rate": 3.404181667967944e-06, |
|
"loss": 0.7216, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.928887103254319, |
|
"grad_norm": 0.45532211661338806, |
|
"learning_rate": 3.3908608435351804e-06, |
|
"loss": 0.7881, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.9312977099236641, |
|
"grad_norm": 0.6385839581489563, |
|
"learning_rate": 3.3775527478805283e-06, |
|
"loss": 0.8366, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.9337083165930093, |
|
"grad_norm": 0.49061232805252075, |
|
"learning_rate": 3.364257486275053e-06, |
|
"loss": 0.6703, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.9361189232623544, |
|
"grad_norm": 0.4874611496925354, |
|
"learning_rate": 3.3509751638883038e-06, |
|
"loss": 0.8454, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.9385295299316994, |
|
"grad_norm": 0.5368838310241699, |
|
"learning_rate": 3.337705885787469e-06, |
|
"loss": 0.8084, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.9409401366010446, |
|
"grad_norm": 0.45422106981277466, |
|
"learning_rate": 3.3244497569365613e-06, |
|
"loss": 0.7444, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.9433507432703898, |
|
"grad_norm": 0.5121768712997437, |
|
"learning_rate": 3.3112068821955722e-06, |
|
"loss": 0.7654, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.9457613499397348, |
|
"grad_norm": 0.5135585069656372, |
|
"learning_rate": 3.297977366319657e-06, |
|
"loss": 0.7905, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.9481719566090798, |
|
"grad_norm": 0.4917021691799164, |
|
"learning_rate": 3.284761313958286e-06, |
|
"loss": 0.7467, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.950582563278425, |
|
"grad_norm": 0.5125773549079895, |
|
"learning_rate": 3.271558829654442e-06, |
|
"loss": 0.8483, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.9529931699477703, |
|
"grad_norm": 0.45189735293388367, |
|
"learning_rate": 3.2583700178437714e-06, |
|
"loss": 0.7283, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.9554037766171153, |
|
"grad_norm": 0.5409987568855286, |
|
"learning_rate": 3.245194982853774e-06, |
|
"loss": 0.9053, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.9578143832864603, |
|
"grad_norm": 0.45805126428604126, |
|
"learning_rate": 3.232033828902963e-06, |
|
"loss": 0.6193, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.9602249899558055, |
|
"grad_norm": 0.5065536499023438, |
|
"learning_rate": 3.218886660100055e-06, |
|
"loss": 0.8551, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.9626355966251507, |
|
"grad_norm": 0.4294704496860504, |
|
"learning_rate": 3.205753580443136e-06, |
|
"loss": 0.7046, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.9650462032944958, |
|
"grad_norm": 0.5121597647666931, |
|
"learning_rate": 3.1926346938188434e-06, |
|
"loss": 0.6777, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.9674568099638408, |
|
"grad_norm": 0.5309213995933533, |
|
"learning_rate": 3.1795301040015477e-06, |
|
"loss": 0.8976, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.969867416633186, |
|
"grad_norm": 0.4532058835029602, |
|
"learning_rate": 3.166439914652518e-06, |
|
"loss": 0.689, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.9722780233025312, |
|
"grad_norm": 0.49570393562316895, |
|
"learning_rate": 3.1533642293191204e-06, |
|
"loss": 0.8078, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.9746886299718762, |
|
"grad_norm": 0.4409126043319702, |
|
"learning_rate": 3.1403031514339844e-06, |
|
"loss": 0.7737, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.9770992366412212, |
|
"grad_norm": 0.44032543897628784, |
|
"learning_rate": 3.1272567843141955e-06, |
|
"loss": 0.8226, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.9795098433105665, |
|
"grad_norm": 0.4650809168815613, |
|
"learning_rate": 3.1142252311604635e-06, |
|
"loss": 0.789, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.9819204499799117, |
|
"grad_norm": 0.4510478675365448, |
|
"learning_rate": 3.1012085950563264e-06, |
|
"loss": 0.7631, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.9843310566492567, |
|
"grad_norm": 0.4166731834411621, |
|
"learning_rate": 3.088206978967315e-06, |
|
"loss": 0.7008, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.9867416633186017, |
|
"grad_norm": 0.4695136249065399, |
|
"learning_rate": 3.0752204857401534e-06, |
|
"loss": 0.7981, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.989152269987947, |
|
"grad_norm": 0.48384779691696167, |
|
"learning_rate": 3.0622492181019325e-06, |
|
"loss": 0.8242, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.9915628766572921, |
|
"grad_norm": 0.4991871118545532, |
|
"learning_rate": 3.0492932786593132e-06, |
|
"loss": 0.7606, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.9939734833266372, |
|
"grad_norm": 0.4156096577644348, |
|
"learning_rate": 3.0363527698976948e-06, |
|
"loss": 0.8353, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.9963840899959822, |
|
"grad_norm": 0.45269855856895447, |
|
"learning_rate": 3.0234277941804247e-06, |
|
"loss": 0.7698, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.9987946966653274, |
|
"grad_norm": 0.5202860832214355, |
|
"learning_rate": 3.0105184537479747e-06, |
|
"loss": 0.869, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.001607071112897, |
|
"grad_norm": 0.6450104713439941, |
|
"learning_rate": 2.9976248507171392e-06, |
|
"loss": 0.9072, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.004017677782242, |
|
"grad_norm": 0.5156070590019226, |
|
"learning_rate": 2.98474708708022e-06, |
|
"loss": 0.7218, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.006428284451587, |
|
"grad_norm": 0.45881029963493347, |
|
"learning_rate": 2.971885264704232e-06, |
|
"loss": 0.7287, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.008838891120932, |
|
"grad_norm": 0.4517820477485657, |
|
"learning_rate": 2.959039485330082e-06, |
|
"loss": 0.6569, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.0112494977902773, |
|
"grad_norm": 0.47716987133026123, |
|
"learning_rate": 2.9462098505717796e-06, |
|
"loss": 0.7514, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.0136601044596225, |
|
"grad_norm": 0.555299699306488, |
|
"learning_rate": 2.9333964619156175e-06, |
|
"loss": 0.8071, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.0160707111289673, |
|
"grad_norm": 0.45214855670928955, |
|
"learning_rate": 2.920599420719384e-06, |
|
"loss": 0.7228, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.0184813177983125, |
|
"grad_norm": 0.48705416917800903, |
|
"learning_rate": 2.907818828211548e-06, |
|
"loss": 0.7575, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.0208919244676578, |
|
"grad_norm": 0.5375069379806519, |
|
"learning_rate": 2.895054785490474e-06, |
|
"loss": 0.788, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.023302531137003, |
|
"grad_norm": 0.49045616388320923, |
|
"learning_rate": 2.8823073935235955e-06, |
|
"loss": 0.6536, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.0257131378063478, |
|
"grad_norm": 0.5409977436065674, |
|
"learning_rate": 2.869576753146652e-06, |
|
"loss": 0.728, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.028123744475693, |
|
"grad_norm": 0.5111508965492249, |
|
"learning_rate": 2.8568629650628633e-06, |
|
"loss": 0.7232, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.030534351145038, |
|
"grad_norm": 0.5003826022148132, |
|
"learning_rate": 2.8441661298421423e-06, |
|
"loss": 0.8149, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.0329449578143834, |
|
"grad_norm": 0.4179787039756775, |
|
"learning_rate": 2.8314863479203026e-06, |
|
"loss": 0.6926, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.0353555644837282, |
|
"grad_norm": 0.5544188618659973, |
|
"learning_rate": 2.8188237195982583e-06, |
|
"loss": 0.8301, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.0377661711530735, |
|
"grad_norm": 0.4455435872077942, |
|
"learning_rate": 2.806178345041234e-06, |
|
"loss": 0.7325, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.0401767778224187, |
|
"grad_norm": 0.45152920484542847, |
|
"learning_rate": 2.793550324277977e-06, |
|
"loss": 0.7377, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.042587384491764, |
|
"grad_norm": 0.41274791955947876, |
|
"learning_rate": 2.7809397571999506e-06, |
|
"loss": 0.6473, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.0449979911611087, |
|
"grad_norm": 0.5886186957359314, |
|
"learning_rate": 2.768346743560565e-06, |
|
"loss": 0.8196, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.047408597830454, |
|
"grad_norm": 0.5123888254165649, |
|
"learning_rate": 2.755771382974369e-06, |
|
"loss": 0.8714, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.049819204499799, |
|
"grad_norm": 0.4040084481239319, |
|
"learning_rate": 2.743213774916275e-06, |
|
"loss": 0.6801, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.0522298111691444, |
|
"grad_norm": 0.504391610622406, |
|
"learning_rate": 2.7306740187207665e-06, |
|
"loss": 0.69, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.054640417838489, |
|
"grad_norm": 0.4550461769104004, |
|
"learning_rate": 2.7181522135811123e-06, |
|
"loss": 0.7659, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.0570510245078344, |
|
"grad_norm": 0.46919989585876465, |
|
"learning_rate": 2.705648458548584e-06, |
|
"loss": 0.7481, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.0594616311771796, |
|
"grad_norm": 0.5024572014808655, |
|
"learning_rate": 2.693162852531671e-06, |
|
"loss": 0.7341, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.061872237846525, |
|
"grad_norm": 0.44993552565574646, |
|
"learning_rate": 2.6806954942952973e-06, |
|
"loss": 0.7498, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.0642828445158696, |
|
"grad_norm": 0.46789172291755676, |
|
"learning_rate": 2.6682464824600473e-06, |
|
"loss": 0.7624, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.066693451185215, |
|
"grad_norm": 0.47225359082221985, |
|
"learning_rate": 2.6558159155013684e-06, |
|
"loss": 0.7487, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.06910405785456, |
|
"grad_norm": 0.458960622549057, |
|
"learning_rate": 2.6434038917488163e-06, |
|
"loss": 0.8097, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.0715146645239053, |
|
"grad_norm": 0.4237467348575592, |
|
"learning_rate": 2.6310105093852558e-06, |
|
"loss": 0.7236, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.07392527119325, |
|
"grad_norm": 0.42090222239494324, |
|
"learning_rate": 2.6186358664460947e-06, |
|
"loss": 0.7229, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.0763358778625953, |
|
"grad_norm": 0.4907824695110321, |
|
"learning_rate": 2.606280060818508e-06, |
|
"loss": 0.7857, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.0787464845319406, |
|
"grad_norm": 0.4463391602039337, |
|
"learning_rate": 2.593943190240659e-06, |
|
"loss": 0.7054, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.081157091201286, |
|
"grad_norm": 0.4785970151424408, |
|
"learning_rate": 2.5816253523009305e-06, |
|
"loss": 0.7134, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.0835676978706306, |
|
"grad_norm": 0.43754929304122925, |
|
"learning_rate": 2.5693266444371556e-06, |
|
"loss": 0.719, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.085978304539976, |
|
"grad_norm": 0.5116653442382812, |
|
"learning_rate": 2.557047163935832e-06, |
|
"loss": 0.7123, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.088388911209321, |
|
"grad_norm": 0.5629407167434692, |
|
"learning_rate": 2.5447870079313764e-06, |
|
"loss": 0.8839, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.0907995178786662, |
|
"grad_norm": 0.44065314531326294, |
|
"learning_rate": 2.5325462734053298e-06, |
|
"loss": 0.6485, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.093210124548011, |
|
"grad_norm": 0.4992212951183319, |
|
"learning_rate": 2.520325057185614e-06, |
|
"loss": 0.7854, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.0956207312173563, |
|
"grad_norm": 0.4579334855079651, |
|
"learning_rate": 2.508123455945751e-06, |
|
"loss": 0.7579, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.0980313378867015, |
|
"grad_norm": 0.432600736618042, |
|
"learning_rate": 2.4959415662041005e-06, |
|
"loss": 0.6572, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.1004419445560467, |
|
"grad_norm": 0.47311294078826904, |
|
"learning_rate": 2.483779484323101e-06, |
|
"loss": 0.7735, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.102852551225392, |
|
"grad_norm": 0.4287633001804352, |
|
"learning_rate": 2.4716373065085037e-06, |
|
"loss": 0.7641, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.1052631578947367, |
|
"grad_norm": 0.43689078092575073, |
|
"learning_rate": 2.4595151288086104e-06, |
|
"loss": 0.6999, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.107673764564082, |
|
"grad_norm": 0.42172539234161377, |
|
"learning_rate": 2.447413047113525e-06, |
|
"loss": 0.7577, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.110084371233427, |
|
"grad_norm": 0.449970543384552, |
|
"learning_rate": 2.435331157154371e-06, |
|
"loss": 0.743, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.1124949779027724, |
|
"grad_norm": 0.4546036124229431, |
|
"learning_rate": 2.4232695545025625e-06, |
|
"loss": 0.6852, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.114905584572117, |
|
"grad_norm": 0.45672449469566345, |
|
"learning_rate": 2.411228334569029e-06, |
|
"loss": 0.8218, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.1173161912414624, |
|
"grad_norm": 0.3891432583332062, |
|
"learning_rate": 2.3992075926034663e-06, |
|
"loss": 0.6448, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.1197267979108076, |
|
"grad_norm": 0.48973438143730164, |
|
"learning_rate": 2.3872074236935854e-06, |
|
"loss": 0.8659, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.122137404580153, |
|
"grad_norm": 0.4461488425731659, |
|
"learning_rate": 2.375227922764356e-06, |
|
"loss": 0.7092, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.1245480112494977, |
|
"grad_norm": 0.43319955468177795, |
|
"learning_rate": 2.3632691845772592e-06, |
|
"loss": 0.6859, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.126958617918843, |
|
"grad_norm": 0.427815318107605, |
|
"learning_rate": 2.351331303729536e-06, |
|
"loss": 0.7603, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.129369224588188, |
|
"grad_norm": 0.47836270928382874, |
|
"learning_rate": 2.3394143746534383e-06, |
|
"loss": 0.7766, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.1317798312575333, |
|
"grad_norm": 0.4509316086769104, |
|
"learning_rate": 2.3275184916154897e-06, |
|
"loss": 0.7733, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.134190437926878, |
|
"grad_norm": 0.45884618163108826, |
|
"learning_rate": 2.31564374871572e-06, |
|
"loss": 0.7064, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.1366010445962234, |
|
"grad_norm": 0.44929900765419006, |
|
"learning_rate": 2.30379023988695e-06, |
|
"loss": 0.6178, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.1390116512655686, |
|
"grad_norm": 0.4776729941368103, |
|
"learning_rate": 2.2919580588940203e-06, |
|
"loss": 0.8329, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.141422257934914, |
|
"grad_norm": 0.43089407682418823, |
|
"learning_rate": 2.280147299333068e-06, |
|
"loss": 0.708, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.1438328646042586, |
|
"grad_norm": 0.4724086821079254, |
|
"learning_rate": 2.268358054630778e-06, |
|
"loss": 0.7594, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.146243471273604, |
|
"grad_norm": 0.45946723222732544, |
|
"learning_rate": 2.256590418043647e-06, |
|
"loss": 0.7212, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.148654077942949, |
|
"grad_norm": 0.4457273483276367, |
|
"learning_rate": 2.244844482657242e-06, |
|
"loss": 0.7801, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.1510646846122943, |
|
"grad_norm": 0.4425724148750305, |
|
"learning_rate": 2.2331203413854756e-06, |
|
"loss": 0.701, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.153475291281639, |
|
"grad_norm": 0.44919276237487793, |
|
"learning_rate": 2.221418086969849e-06, |
|
"loss": 0.7456, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.1558858979509843, |
|
"grad_norm": 0.40379559993743896, |
|
"learning_rate": 2.2097378119787472e-06, |
|
"loss": 0.7446, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.1582965046203295, |
|
"grad_norm": 0.39578792452812195, |
|
"learning_rate": 2.1980796088066743e-06, |
|
"loss": 0.6737, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.1607071112896747, |
|
"grad_norm": 0.4230864346027374, |
|
"learning_rate": 2.186443569673554e-06, |
|
"loss": 0.8167, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.1631177179590195, |
|
"grad_norm": 0.43164435029029846, |
|
"learning_rate": 2.1748297866239797e-06, |
|
"loss": 0.6706, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.1655283246283648, |
|
"grad_norm": 0.44961944222450256, |
|
"learning_rate": 2.163238351526492e-06, |
|
"loss": 0.7525, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.16793893129771, |
|
"grad_norm": 0.45515599846839905, |
|
"learning_rate": 2.151669356072854e-06, |
|
"loss": 0.7806, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.170349537967055, |
|
"grad_norm": 0.41758570075035095, |
|
"learning_rate": 2.140122891777323e-06, |
|
"loss": 0.7265, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.1727601446364, |
|
"grad_norm": 0.45163440704345703, |
|
"learning_rate": 2.1285990499759294e-06, |
|
"loss": 0.7998, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.175170751305745, |
|
"grad_norm": 0.40521442890167236, |
|
"learning_rate": 2.1170979218257525e-06, |
|
"loss": 0.6439, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.1775813579750904, |
|
"grad_norm": 0.4744569659233093, |
|
"learning_rate": 2.1056195983041987e-06, |
|
"loss": 0.7664, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.1799919646444357, |
|
"grad_norm": 0.4329550862312317, |
|
"learning_rate": 2.094164170208282e-06, |
|
"loss": 0.6957, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.1824025713137805, |
|
"grad_norm": 0.47254642844200134, |
|
"learning_rate": 2.0827317281539142e-06, |
|
"loss": 0.8263, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.1848131779831257, |
|
"grad_norm": 0.4756420850753784, |
|
"learning_rate": 2.071322362575167e-06, |
|
"loss": 0.7397, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.187223784652471, |
|
"grad_norm": 0.4532981812953949, |
|
"learning_rate": 2.059936163723586e-06, |
|
"loss": 0.7581, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.189634391321816, |
|
"grad_norm": 0.4664817750453949, |
|
"learning_rate": 2.048573221667444e-06, |
|
"loss": 0.752, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.192044997991161, |
|
"grad_norm": 0.4368799328804016, |
|
"learning_rate": 2.037233626291061e-06, |
|
"loss": 0.7466, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.194455604660506, |
|
"grad_norm": 0.45968392491340637, |
|
"learning_rate": 2.0259174672940686e-06, |
|
"loss": 0.7624, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.1968662113298514, |
|
"grad_norm": 0.43567362427711487, |
|
"learning_rate": 2.0146248341907123e-06, |
|
"loss": 0.7123, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.1992768179991966, |
|
"grad_norm": 0.4111224412918091, |
|
"learning_rate": 2.00335581630914e-06, |
|
"loss": 0.7519, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.2016874246685414, |
|
"grad_norm": 0.4405570328235626, |
|
"learning_rate": 1.992110502790696e-06, |
|
"loss": 0.7757, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.2040980313378866, |
|
"grad_norm": 0.46473294496536255, |
|
"learning_rate": 1.9808889825892156e-06, |
|
"loss": 0.7668, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.206508638007232, |
|
"grad_norm": 0.46520110964775085, |
|
"learning_rate": 1.969691344470327e-06, |
|
"loss": 0.7371, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.208919244676577, |
|
"grad_norm": 0.43973931670188904, |
|
"learning_rate": 1.9585176770107324e-06, |
|
"loss": 0.7071, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.211329851345922, |
|
"grad_norm": 0.4563467502593994, |
|
"learning_rate": 1.9473680685975305e-06, |
|
"loss": 0.7264, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.213740458015267, |
|
"grad_norm": 0.49203842878341675, |
|
"learning_rate": 1.9362426074275003e-06, |
|
"loss": 0.7944, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.2161510646846123, |
|
"grad_norm": 0.4180643856525421, |
|
"learning_rate": 1.925141381506408e-06, |
|
"loss": 0.749, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.2185616713539575, |
|
"grad_norm": 0.40201982855796814, |
|
"learning_rate": 1.914064478648314e-06, |
|
"loss": 0.7118, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.2209722780233023, |
|
"grad_norm": 0.4542165696620941, |
|
"learning_rate": 1.9030119864748747e-06, |
|
"loss": 0.8035, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.2233828846926476, |
|
"grad_norm": 0.442167729139328, |
|
"learning_rate": 1.8919839924146504e-06, |
|
"loss": 0.7648, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.225793491361993, |
|
"grad_norm": 0.5001323223114014, |
|
"learning_rate": 1.8809805837024148e-06, |
|
"loss": 0.7304, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.228204098031338, |
|
"grad_norm": 0.47148486971855164, |
|
"learning_rate": 1.8700018473784614e-06, |
|
"loss": 0.7166, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.230614704700683, |
|
"grad_norm": 0.4796871542930603, |
|
"learning_rate": 1.859047870287926e-06, |
|
"loss": 0.7559, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.233025311370028, |
|
"grad_norm": 0.4473366141319275, |
|
"learning_rate": 1.848118739080077e-06, |
|
"loss": 0.7561, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.2354359180393732, |
|
"grad_norm": 0.41103044152259827, |
|
"learning_rate": 1.8372145402076586e-06, |
|
"loss": 0.6952, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.2378465247087185, |
|
"grad_norm": 0.4418680667877197, |
|
"learning_rate": 1.826335359926184e-06, |
|
"loss": 0.7806, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.2402571313780637, |
|
"grad_norm": 0.4269905388355255, |
|
"learning_rate": 1.8154812842932651e-06, |
|
"loss": 0.7189, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.2426677380474085, |
|
"grad_norm": 0.4618895351886749, |
|
"learning_rate": 1.8046523991679271e-06, |
|
"loss": 0.7683, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.2450783447167537, |
|
"grad_norm": 0.460475891828537, |
|
"learning_rate": 1.7938487902099323e-06, |
|
"loss": 0.8004, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.247488951386099, |
|
"grad_norm": 0.44570979475975037, |
|
"learning_rate": 1.7830705428790967e-06, |
|
"loss": 0.7299, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.2498995580554437, |
|
"grad_norm": 0.4265737533569336, |
|
"learning_rate": 1.7723177424346272e-06, |
|
"loss": 0.675, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.252310164724789, |
|
"grad_norm": 0.4518434703350067, |
|
"learning_rate": 1.7615904739344252e-06, |
|
"loss": 0.7065, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.254720771394134, |
|
"grad_norm": 0.4241621792316437, |
|
"learning_rate": 1.7508888222344407e-06, |
|
"loss": 0.6681, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.2571313780634794, |
|
"grad_norm": 0.4530150890350342, |
|
"learning_rate": 1.7402128719879723e-06, |
|
"loss": 0.7767, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.2595419847328246, |
|
"grad_norm": 0.45492124557495117, |
|
"learning_rate": 1.7295627076450267e-06, |
|
"loss": 0.7728, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.2619525914021694, |
|
"grad_norm": 0.38592711091041565, |
|
"learning_rate": 1.7189384134516262e-06, |
|
"loss": 0.6564, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.2643631980715146, |
|
"grad_norm": 0.4892207086086273, |
|
"learning_rate": 1.7083400734491568e-06, |
|
"loss": 0.8132, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.26677380474086, |
|
"grad_norm": 0.43954434990882874, |
|
"learning_rate": 1.6977677714736978e-06, |
|
"loss": 0.7165, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.2691844114102047, |
|
"grad_norm": 0.44025689363479614, |
|
"learning_rate": 1.6872215911553602e-06, |
|
"loss": 0.6898, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.27159501807955, |
|
"grad_norm": 0.42411690950393677, |
|
"learning_rate": 1.6767016159176226e-06, |
|
"loss": 0.7022, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.274005624748895, |
|
"grad_norm": 0.4893578588962555, |
|
"learning_rate": 1.6662079289766802e-06, |
|
"loss": 0.7966, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.2764162314182403, |
|
"grad_norm": 0.4120705723762512, |
|
"learning_rate": 1.6557406133407693e-06, |
|
"loss": 0.7007, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.2788268380875856, |
|
"grad_norm": 0.42650339007377625, |
|
"learning_rate": 1.645299751809531e-06, |
|
"loss": 0.7524, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.2812374447569304, |
|
"grad_norm": 0.436956524848938, |
|
"learning_rate": 1.6348854269733415e-06, |
|
"loss": 0.6484, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.2836480514262756, |
|
"grad_norm": 0.4705548882484436, |
|
"learning_rate": 1.6244977212126639e-06, |
|
"loss": 0.7546, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.286058658095621, |
|
"grad_norm": 0.43017950654029846, |
|
"learning_rate": 1.6141367166973975e-06, |
|
"loss": 0.7197, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.288469264764966, |
|
"grad_norm": 0.4429386556148529, |
|
"learning_rate": 1.6038024953862263e-06, |
|
"loss": 0.7752, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.290879871434311, |
|
"grad_norm": 0.4189038872718811, |
|
"learning_rate": 1.5934951390259706e-06, |
|
"loss": 0.7549, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.293290478103656, |
|
"grad_norm": 0.4262165129184723, |
|
"learning_rate": 1.5832147291509414e-06, |
|
"loss": 0.6839, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.2957010847730013, |
|
"grad_norm": 0.4689725935459137, |
|
"learning_rate": 1.5729613470822925e-06, |
|
"loss": 0.77, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.2981116914423465, |
|
"grad_norm": 0.4400913119316101, |
|
"learning_rate": 1.5627350739273873e-06, |
|
"loss": 0.7844, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.3005222981116913, |
|
"grad_norm": 0.43795400857925415, |
|
"learning_rate": 1.5525359905791366e-06, |
|
"loss": 0.7039, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.3029329047810365, |
|
"grad_norm": 0.4654630124568939, |
|
"learning_rate": 1.5423641777153835e-06, |
|
"loss": 0.803, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.3053435114503817, |
|
"grad_norm": 0.42076098918914795, |
|
"learning_rate": 1.5322197157982483e-06, |
|
"loss": 0.7234, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.307754118119727, |
|
"grad_norm": 0.4572290778160095, |
|
"learning_rate": 1.522102685073496e-06, |
|
"loss": 0.7132, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.3101647247890718, |
|
"grad_norm": 0.43574434518814087, |
|
"learning_rate": 1.512013165569905e-06, |
|
"loss": 0.7845, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.312575331458417, |
|
"grad_norm": 0.46240317821502686, |
|
"learning_rate": 1.5019512370986294e-06, |
|
"loss": 0.7477, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.314985938127762, |
|
"grad_norm": 0.444572776556015, |
|
"learning_rate": 1.4919169792525705e-06, |
|
"loss": 0.6624, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.3173965447971074, |
|
"grad_norm": 0.47464096546173096, |
|
"learning_rate": 1.481910471405751e-06, |
|
"loss": 0.7834, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.319807151466452, |
|
"grad_norm": 0.44317322969436646, |
|
"learning_rate": 1.4719317927126732e-06, |
|
"loss": 0.6895, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.3222177581357974, |
|
"grad_norm": 0.4504746198654175, |
|
"learning_rate": 1.4619810221077135e-06, |
|
"loss": 0.7765, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.3246283648051427, |
|
"grad_norm": 0.44130560755729675, |
|
"learning_rate": 1.452058238304475e-06, |
|
"loss": 0.7194, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.327038971474488, |
|
"grad_norm": 0.43299388885498047, |
|
"learning_rate": 1.4421635197951889e-06, |
|
"loss": 0.8262, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.3294495781438327, |
|
"grad_norm": 0.3990250825881958, |
|
"learning_rate": 1.432296944850073e-06, |
|
"loss": 0.6881, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.331860184813178, |
|
"grad_norm": 0.4231194257736206, |
|
"learning_rate": 1.4224585915167233e-06, |
|
"loss": 0.8018, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.334270791482523, |
|
"grad_norm": 0.44107508659362793, |
|
"learning_rate": 1.4126485376194954e-06, |
|
"loss": 0.723, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.3366813981518684, |
|
"grad_norm": 0.4841976463794708, |
|
"learning_rate": 1.4028668607588863e-06, |
|
"loss": 0.8458, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.339092004821213, |
|
"grad_norm": 0.4238888621330261, |
|
"learning_rate": 1.3931136383109216e-06, |
|
"loss": 0.6292, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.3415026114905584, |
|
"grad_norm": 0.4618934094905853, |
|
"learning_rate": 1.3833889474265495e-06, |
|
"loss": 0.7936, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.3439132181599036, |
|
"grad_norm": 0.43347546458244324, |
|
"learning_rate": 1.3736928650310138e-06, |
|
"loss": 0.7115, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.346323824829249, |
|
"grad_norm": 0.4453343451023102, |
|
"learning_rate": 1.3640254678232696e-06, |
|
"loss": 0.6876, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.3487344314985936, |
|
"grad_norm": 0.47834518551826477, |
|
"learning_rate": 1.3543868322753507e-06, |
|
"loss": 0.7283, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.351145038167939, |
|
"grad_norm": 0.44158080220222473, |
|
"learning_rate": 1.3447770346317878e-06, |
|
"loss": 0.6892, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.353555644837284, |
|
"grad_norm": 0.47228848934173584, |
|
"learning_rate": 1.33519615090899e-06, |
|
"loss": 0.7048, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.3559662515066293, |
|
"grad_norm": 0.4358145594596863, |
|
"learning_rate": 1.3256442568946492e-06, |
|
"loss": 0.7303, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.3583768581759745, |
|
"grad_norm": 0.4501979649066925, |
|
"learning_rate": 1.3161214281471406e-06, |
|
"loss": 0.7057, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.3607874648453193, |
|
"grad_norm": 0.491491436958313, |
|
"learning_rate": 1.3066277399949228e-06, |
|
"loss": 0.8002, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.3631980715146645, |
|
"grad_norm": 0.43371856212615967, |
|
"learning_rate": 1.297163267535944e-06, |
|
"loss": 0.6776, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.3656086781840098, |
|
"grad_norm": 0.4635317623615265, |
|
"learning_rate": 1.2877280856370527e-06, |
|
"loss": 0.7789, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.3680192848533546, |
|
"grad_norm": 0.4209749698638916, |
|
"learning_rate": 1.2783222689333886e-06, |
|
"loss": 0.634, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.3704298915227, |
|
"grad_norm": 0.47543400526046753, |
|
"learning_rate": 1.2689458918278158e-06, |
|
"loss": 0.7873, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.372840498192045, |
|
"grad_norm": 0.43107813596725464, |
|
"learning_rate": 1.2595990284903154e-06, |
|
"loss": 0.7661, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.3752511048613902, |
|
"grad_norm": 0.4292666018009186, |
|
"learning_rate": 1.2502817528574074e-06, |
|
"loss": 0.7588, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.3776617115307355, |
|
"grad_norm": 0.4570859968662262, |
|
"learning_rate": 1.240994138631566e-06, |
|
"loss": 0.7607, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.3800723182000803, |
|
"grad_norm": 0.45180368423461914, |
|
"learning_rate": 1.2317362592806277e-06, |
|
"loss": 0.6693, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.3824829248694255, |
|
"grad_norm": 0.4896697402000427, |
|
"learning_rate": 1.2225081880372275e-06, |
|
"loss": 0.7532, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.3848935315387707, |
|
"grad_norm": 0.4716828465461731, |
|
"learning_rate": 1.2133099978982016e-06, |
|
"loss": 0.7717, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.3873041382081155, |
|
"grad_norm": 0.4532492160797119, |
|
"learning_rate": 1.2041417616240202e-06, |
|
"loss": 0.7097, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.3897147448774607, |
|
"grad_norm": 0.4797171652317047, |
|
"learning_rate": 1.1950035517382102e-06, |
|
"loss": 0.7309, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.392125351546806, |
|
"grad_norm": 0.42635875940322876, |
|
"learning_rate": 1.185895440526778e-06, |
|
"loss": 0.7612, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.394535958216151, |
|
"grad_norm": 0.41793176531791687, |
|
"learning_rate": 1.176817500037642e-06, |
|
"loss": 0.6893, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.3969465648854964, |
|
"grad_norm": 0.43717044591903687, |
|
"learning_rate": 1.1677698020800637e-06, |
|
"loss": 0.8055, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.399357171554841, |
|
"grad_norm": 0.4092640280723572, |
|
"learning_rate": 1.1587524182240705e-06, |
|
"loss": 0.7153, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.4017677782241864, |
|
"grad_norm": 0.4540466368198395, |
|
"learning_rate": 1.1497654197999026e-06, |
|
"loss": 0.7867, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.4041783848935316, |
|
"grad_norm": 0.3998194932937622, |
|
"learning_rate": 1.140808877897439e-06, |
|
"loss": 0.6229, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.4065889915628764, |
|
"grad_norm": 0.5297384262084961, |
|
"learning_rate": 1.1318828633656392e-06, |
|
"loss": 0.8667, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.4089995982322217, |
|
"grad_norm": 0.432722270488739, |
|
"learning_rate": 1.1229874468119805e-06, |
|
"loss": 0.7042, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.411410204901567, |
|
"grad_norm": 0.4277879297733307, |
|
"learning_rate": 1.1141226986019033e-06, |
|
"loss": 0.7033, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.413820811570912, |
|
"grad_norm": 0.4275243282318115, |
|
"learning_rate": 1.1052886888582503e-06, |
|
"loss": 0.6984, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.4162314182402573, |
|
"grad_norm": 0.44211599230766296, |
|
"learning_rate": 1.096485487460715e-06, |
|
"loss": 0.6835, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.418642024909602, |
|
"grad_norm": 0.44883278012275696, |
|
"learning_rate": 1.0877131640452842e-06, |
|
"loss": 0.7949, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.4210526315789473, |
|
"grad_norm": 0.4560227692127228, |
|
"learning_rate": 1.0789717880036983e-06, |
|
"loss": 0.7416, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.4234632382482926, |
|
"grad_norm": 0.4234108030796051, |
|
"learning_rate": 1.0702614284828833e-06, |
|
"loss": 0.649, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.4258738449176374, |
|
"grad_norm": 0.5065224766731262, |
|
"learning_rate": 1.0615821543844257e-06, |
|
"loss": 0.7943, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.4282844515869826, |
|
"grad_norm": 0.44325506687164307, |
|
"learning_rate": 1.052934034364011e-06, |
|
"loss": 0.6141, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.430695058256328, |
|
"grad_norm": 0.4514535367488861, |
|
"learning_rate": 1.0443171368308879e-06, |
|
"loss": 0.7904, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.433105664925673, |
|
"grad_norm": 0.4588463008403778, |
|
"learning_rate": 1.035731529947326e-06, |
|
"loss": 0.8362, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.4355162715950183, |
|
"grad_norm": 0.4419898986816406, |
|
"learning_rate": 1.0271772816280755e-06, |
|
"loss": 0.6919, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.437926878264363, |
|
"grad_norm": 0.4751293659210205, |
|
"learning_rate": 1.0186544595398306e-06, |
|
"loss": 0.6529, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.4403374849337083, |
|
"grad_norm": 0.44933363795280457, |
|
"learning_rate": 1.0101631311006998e-06, |
|
"loss": 0.7517, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.4427480916030535, |
|
"grad_norm": 0.3989573121070862, |
|
"learning_rate": 1.0017033634796575e-06, |
|
"loss": 0.6888, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.4451586982723987, |
|
"grad_norm": 0.4670220613479614, |
|
"learning_rate": 9.93275223596033e-07, |
|
"loss": 0.768, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.4475693049417435, |
|
"grad_norm": 0.4125686585903168, |
|
"learning_rate": 9.848787781189607e-07, |
|
"loss": 0.6586, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.4499799116110887, |
|
"grad_norm": 0.49060118198394775, |
|
"learning_rate": 9.765140934668705e-07, |
|
"loss": 0.8045, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.452390518280434, |
|
"grad_norm": 0.46147778630256653, |
|
"learning_rate": 9.681812358069508e-07, |
|
"loss": 0.753, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.454801124949779, |
|
"grad_norm": 0.42407697439193726, |
|
"learning_rate": 9.5988027105463e-07, |
|
"loss": 0.6705, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.457211731619124, |
|
"grad_norm": 0.4550100564956665, |
|
"learning_rate": 9.516112648730524e-07, |
|
"loss": 0.8285, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.459622338288469, |
|
"grad_norm": 0.4081559479236603, |
|
"learning_rate": 9.433742826725628e-07, |
|
"loss": 0.6958, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.4620329449578144, |
|
"grad_norm": 0.45342516899108887, |
|
"learning_rate": 9.351693896101826e-07, |
|
"loss": 0.7204, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.4644435516271597, |
|
"grad_norm": 0.5125803351402283, |
|
"learning_rate": 9.269966505891064e-07, |
|
"loss": 0.829, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.4668541582965045, |
|
"grad_norm": 0.3878057897090912, |
|
"learning_rate": 9.188561302581689e-07, |
|
"loss": 0.6477, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.4692647649658497, |
|
"grad_norm": 0.4437485337257385, |
|
"learning_rate": 9.107478930113555e-07, |
|
"loss": 0.7745, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.471675371635195, |
|
"grad_norm": 0.44392943382263184, |
|
"learning_rate": 9.026720029872782e-07, |
|
"loss": 0.7788, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.47408597830454, |
|
"grad_norm": 0.40301457047462463, |
|
"learning_rate": 8.946285240686725e-07, |
|
"loss": 0.7646, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.476496584973885, |
|
"grad_norm": 0.39164724946022034, |
|
"learning_rate": 8.866175198818927e-07, |
|
"loss": 0.6864, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.47890719164323, |
|
"grad_norm": 0.4614250659942627, |
|
"learning_rate": 8.78639053796409e-07, |
|
"loss": 0.7748, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.4813177983125754, |
|
"grad_norm": 0.3880789279937744, |
|
"learning_rate": 8.706931889243042e-07, |
|
"loss": 0.713, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.4837284049819206, |
|
"grad_norm": 0.4589103162288666, |
|
"learning_rate": 8.627799881197768e-07, |
|
"loss": 0.7714, |
|
"step": 1030 |
|
}, |
    {
      "epoch": 2.4861390116512654,
      "grad_norm": 0.46418026089668274,
      "learning_rate": 8.548995139786398e-07,
      "loss": 0.7834,
      "step": 1031
    },
    {
      "epoch": 2.4885496183206106,
      "grad_norm": 0.4262266755104065,
      "learning_rate": 8.470518288378343e-07,
      "loss": 0.725,
      "step": 1032
    },
    {
      "epoch": 2.490960224989956,
      "grad_norm": 0.4840236008167267,
      "learning_rate": 8.392369947749224e-07,
      "loss": 0.7149,
      "step": 1033
    },
    {
      "epoch": 2.493370831659301,
      "grad_norm": 0.4422532320022583,
      "learning_rate": 8.314550736076115e-07,
      "loss": 0.7639,
      "step": 1034
    },
    {
      "epoch": 2.495781438328646,
      "grad_norm": 0.430061936378479,
      "learning_rate": 8.23706126893255e-07,
      "loss": 0.7445,
      "step": 1035
    },
    {
      "epoch": 2.498192044997991,
      "grad_norm": 0.4430009424686432,
      "learning_rate": 8.159902159283672e-07,
      "loss": 0.7509,
      "step": 1036
    },
    {
      "epoch": 2.5006026516673363,
      "grad_norm": 0.4133704602718353,
      "learning_rate": 8.083074017481412e-07,
      "loss": 0.6887,
      "step": 1037
    },
    {
      "epoch": 2.5030132583366815,
      "grad_norm": 0.42407360672950745,
      "learning_rate": 8.006577451259645e-07,
      "loss": 0.6875,
      "step": 1038
    },
    {
      "epoch": 2.5054238650060263,
      "grad_norm": 0.48745691776275635,
      "learning_rate": 7.930413065729365e-07,
      "loss": 0.8413,
      "step": 1039
    },
    {
      "epoch": 2.5078344716753715,
      "grad_norm": 0.44253039360046387,
      "learning_rate": 7.854581463373967e-07,
      "loss": 0.6515,
      "step": 1040
    },
    {
      "epoch": 2.5102450783447168,
      "grad_norm": 0.4438078701496124,
      "learning_rate": 7.779083244044344e-07,
      "loss": 0.7446,
      "step": 1041
    },
    {
      "epoch": 2.512655685014062,
      "grad_norm": 0.3886706531047821,
      "learning_rate": 7.703919004954324e-07,
      "loss": 0.7701,
      "step": 1042
    },
    {
      "epoch": 2.5150662916834072,
      "grad_norm": 0.426519513130188,
      "learning_rate": 7.629089340675767e-07,
      "loss": 0.7135,
      "step": 1043
    },
    {
      "epoch": 2.517476898352752,
      "grad_norm": 0.4438495934009552,
      "learning_rate": 7.554594843134011e-07,
      "loss": 0.7712,
      "step": 1044
    },
    {
      "epoch": 2.5198875050220972,
      "grad_norm": 0.40177780389785767,
      "learning_rate": 7.480436101603077e-07,
      "loss": 0.7393,
      "step": 1045
    },
    {
      "epoch": 2.5222981116914425,
      "grad_norm": 0.4233896732330322,
      "learning_rate": 7.406613702701082e-07,
      "loss": 0.711,
      "step": 1046
    },
    {
      "epoch": 2.5247087183607873,
      "grad_norm": 0.4507977366447449,
      "learning_rate": 7.333128230385562e-07,
      "loss": 0.7908,
      "step": 1047
    },
    {
      "epoch": 2.5271193250301325,
      "grad_norm": 0.4279252886772156,
      "learning_rate": 7.259980265948846e-07,
      "loss": 0.7375,
      "step": 1048
    },
    {
      "epoch": 2.5295299316994777,
      "grad_norm": 0.40255483984947205,
      "learning_rate": 7.187170388013493e-07,
      "loss": 0.7442,
      "step": 1049
    },
    {
      "epoch": 2.531940538368823,
      "grad_norm": 0.4162873923778534,
      "learning_rate": 7.114699172527712e-07,
      "loss": 0.7179,
      "step": 1050
    },
    {
      "epoch": 2.534351145038168,
      "grad_norm": 0.4078782796859741,
      "learning_rate": 7.04256719276073e-07,
      "loss": 0.7426,
      "step": 1051
    },
    {
      "epoch": 2.536761751707513,
      "grad_norm": 0.4431638717651367,
      "learning_rate": 6.970775019298387e-07,
      "loss": 0.7626,
      "step": 1052
    },
    {
      "epoch": 2.539172358376858,
      "grad_norm": 0.3994295001029968,
      "learning_rate": 6.899323220038512e-07,
      "loss": 0.7119,
      "step": 1053
    },
    {
      "epoch": 2.5415829650462034,
      "grad_norm": 0.44838520884513855,
      "learning_rate": 6.82821236018647e-07,
      "loss": 0.7313,
      "step": 1054
    },
    {
      "epoch": 2.543993571715548,
      "grad_norm": 0.45753780007362366,
      "learning_rate": 6.757443002250708e-07,
      "loss": 0.8166,
      "step": 1055
    },
    {
      "epoch": 2.5464041783848934,
      "grad_norm": 0.4335336983203888,
      "learning_rate": 6.68701570603828e-07,
      "loss": 0.7235,
      "step": 1056
    },
    {
      "epoch": 2.5488147850542386,
      "grad_norm": 0.36801406741142273,
      "learning_rate": 6.616931028650431e-07,
      "loss": 0.6273,
      "step": 1057
    },
    {
      "epoch": 2.551225391723584,
      "grad_norm": 0.45672011375427246,
      "learning_rate": 6.547189524478182e-07,
      "loss": 0.8401,
      "step": 1058
    },
    {
      "epoch": 2.553635998392929,
      "grad_norm": 0.460497111082077,
      "learning_rate": 6.477791745197948e-07,
      "loss": 0.7665,
      "step": 1059
    },
    {
      "epoch": 2.556046605062274,
      "grad_norm": 0.40810006856918335,
      "learning_rate": 6.40873823976722e-07,
      "loss": 0.7092,
      "step": 1060
    },
    {
      "epoch": 2.558457211731619,
      "grad_norm": 0.43864303827285767,
      "learning_rate": 6.340029554420091e-07,
      "loss": 0.7317,
      "step": 1061
    },
    {
      "epoch": 2.5608678184009643,
      "grad_norm": 0.4277905225753784,
      "learning_rate": 6.271666232663104e-07,
      "loss": 0.7217,
      "step": 1062
    },
    {
      "epoch": 2.563278425070309,
      "grad_norm": 0.4536210298538208,
      "learning_rate": 6.20364881527083e-07,
      "loss": 0.7706,
      "step": 1063
    },
    {
      "epoch": 2.5656890317396543,
      "grad_norm": 0.40196314454078674,
      "learning_rate": 6.135977840281648e-07,
      "loss": 0.7011,
      "step": 1064
    },
    {
      "epoch": 2.5680996384089996,
      "grad_norm": 0.4371301233768463,
      "learning_rate": 6.068653842993466e-07,
      "loss": 0.737,
      "step": 1065
    },
    {
      "epoch": 2.570510245078345,
      "grad_norm": 0.42937684059143066,
      "learning_rate": 6.00167735595949e-07,
      "loss": 0.7785,
      "step": 1066
    },
    {
      "epoch": 2.57292085174769,
      "grad_norm": 0.44304633140563965,
      "learning_rate": 5.935048908984004e-07,
      "loss": 0.6764,
      "step": 1067
    },
    {
      "epoch": 2.575331458417035,
      "grad_norm": 0.46503859758377075,
      "learning_rate": 5.868769029118243e-07,
      "loss": 0.7118,
      "step": 1068
    },
    {
      "epoch": 2.57774206508638,
      "grad_norm": 0.43988868594169617,
      "learning_rate": 5.802838240656089e-07,
      "loss": 0.6524,
      "step": 1069
    },
    {
      "epoch": 2.5801526717557253,
      "grad_norm": 0.4378841519355774,
      "learning_rate": 5.737257065130087e-07,
      "loss": 0.7939,
      "step": 1070
    },
    {
      "epoch": 2.58256327842507,
      "grad_norm": 0.4189451038837433,
      "learning_rate": 5.672026021307169e-07,
      "loss": 0.6405,
      "step": 1071
    },
    {
      "epoch": 2.5849738850944153,
      "grad_norm": 0.48448026180267334,
      "learning_rate": 5.607145625184651e-07,
      "loss": 0.7384,
      "step": 1072
    },
    {
      "epoch": 2.5873844917637605,
      "grad_norm": 0.42256635427474976,
      "learning_rate": 5.542616389986144e-07,
      "loss": 0.698,
      "step": 1073
    },
    {
      "epoch": 2.5897950984331057,
      "grad_norm": 0.448444128036499,
      "learning_rate": 5.478438826157412e-07,
      "loss": 0.7934,
      "step": 1074
    },
    {
      "epoch": 2.592205705102451,
      "grad_norm": 0.41748157143592834,
      "learning_rate": 5.414613441362438e-07,
      "loss": 0.7883,
      "step": 1075
    },
    {
      "epoch": 2.5946163117717957,
      "grad_norm": 0.4418646991252899,
      "learning_rate": 5.35114074047935e-07,
      "loss": 0.7271,
      "step": 1076
    },
    {
      "epoch": 2.597026918441141,
      "grad_norm": 0.43920382857322693,
      "learning_rate": 5.288021225596434e-07,
      "loss": 0.7076,
      "step": 1077
    },
    {
      "epoch": 2.599437525110486,
      "grad_norm": 0.38255125284194946,
      "learning_rate": 5.225255396008172e-07,
      "loss": 0.7452,
      "step": 1078
    },
    {
      "epoch": 2.601848131779831,
      "grad_norm": 0.41653716564178467,
      "learning_rate": 5.162843748211277e-07,
      "loss": 0.7739,
      "step": 1079
    },
    {
      "epoch": 2.604258738449176,
      "grad_norm": 0.4101213216781616,
      "learning_rate": 5.10078677590079e-07,
      "loss": 0.6833,
      "step": 1080
    },
    {
      "epoch": 2.6066693451185214,
      "grad_norm": 0.4514157474040985,
      "learning_rate": 5.03908496996618e-07,
      "loss": 0.7694,
      "step": 1081
    },
    {
      "epoch": 2.6090799517878667,
      "grad_norm": 0.4114135503768921,
      "learning_rate": 4.977738818487382e-07,
      "loss": 0.7425,
      "step": 1082
    },
    {
      "epoch": 2.611490558457212,
      "grad_norm": 0.4218822419643402,
      "learning_rate": 4.916748806731081e-07,
      "loss": 0.7634,
      "step": 1083
    },
    {
      "epoch": 2.613901165126557,
      "grad_norm": 0.4250275790691376,
      "learning_rate": 4.856115417146689e-07,
      "loss": 0.6758,
      "step": 1084
    },
    {
      "epoch": 2.616311771795902,
      "grad_norm": 0.4057084918022156,
      "learning_rate": 4.795839129362729e-07,
      "loss": 0.7127,
      "step": 1085
    },
    {
      "epoch": 2.618722378465247,
      "grad_norm": 0.4077472388744354,
      "learning_rate": 4.735920420182871e-07,
      "loss": 0.7175,
      "step": 1086
    },
    {
      "epoch": 2.6211329851345924,
      "grad_norm": 0.4244491755962372,
      "learning_rate": 4.6763597635822557e-07,
      "loss": 0.6854,
      "step": 1087
    },
    {
      "epoch": 2.623543591803937,
      "grad_norm": 0.42558592557907104,
      "learning_rate": 4.6171576307037226e-07,
      "loss": 0.8152,
      "step": 1088
    },
    {
      "epoch": 2.6259541984732824,
      "grad_norm": 0.3767087757587433,
      "learning_rate": 4.558314489854071e-07,
      "loss": 0.6877,
      "step": 1089
    },
    {
      "epoch": 2.6283648051426276,
      "grad_norm": 0.42351633310317993,
      "learning_rate": 4.49983080650036e-07,
      "loss": 0.7761,
      "step": 1090
    },
    {
      "epoch": 2.630775411811973,
      "grad_norm": 0.46133315563201904,
      "learning_rate": 4.4417070432662633e-07,
      "loss": 0.8215,
      "step": 1091
    },
    {
      "epoch": 2.633186018481318,
      "grad_norm": 0.3787693977355957,
      "learning_rate": 4.3839436599283135e-07,
      "loss": 0.6506,
      "step": 1092
    },
    {
      "epoch": 2.635596625150663,
      "grad_norm": 0.41316670179367065,
      "learning_rate": 4.3265411134124023e-07,
      "loss": 0.7557,
      "step": 1093
    },
    {
      "epoch": 2.638007231820008,
      "grad_norm": 0.4531557857990265,
      "learning_rate": 4.269499857790049e-07,
      "loss": 0.8016,
      "step": 1094
    },
    {
      "epoch": 2.6404178384893533,
      "grad_norm": 0.44609588384628296,
      "learning_rate": 4.212820344274859e-07,
      "loss": 0.7169,
      "step": 1095
    },
    {
      "epoch": 2.642828445158698,
      "grad_norm": 0.46584153175354004,
      "learning_rate": 4.1565030212189495e-07,
      "loss": 0.7782,
      "step": 1096
    },
    {
      "epoch": 2.6452390518280433,
      "grad_norm": 0.4595951735973358,
      "learning_rate": 4.100548334109411e-07,
      "loss": 0.7249,
      "step": 1097
    },
    {
      "epoch": 2.6476496584973885,
      "grad_norm": 0.4180499315261841,
      "learning_rate": 4.0449567255647637e-07,
      "loss": 0.7308,
      "step": 1098
    },
    {
      "epoch": 2.6500602651667338,
      "grad_norm": 0.41458413004875183,
      "learning_rate": 3.9897286353314635e-07,
      "loss": 0.7157,
      "step": 1099
    },
    {
      "epoch": 2.652470871836079,
      "grad_norm": 0.42421478033065796,
      "learning_rate": 3.934864500280433e-07,
      "loss": 0.7306,
      "step": 1100
    },
    {
      "epoch": 2.6548814785054238,
      "grad_norm": 0.42849603295326233,
      "learning_rate": 3.8803647544036147e-07,
      "loss": 0.7393,
      "step": 1101
    },
    {
      "epoch": 2.657292085174769,
      "grad_norm": 0.4576681852340698,
      "learning_rate": 3.826229828810474e-07,
      "loss": 0.7787,
      "step": 1102
    },
    {
      "epoch": 2.6597026918441142,
      "grad_norm": 0.42885369062423706,
      "learning_rate": 3.7724601517246937e-07,
      "loss": 0.7948,
      "step": 1103
    },
    {
      "epoch": 2.662113298513459,
      "grad_norm": 0.42016690969467163,
      "learning_rate": 3.7190561484807043e-07,
      "loss": 0.7134,
      "step": 1104
    },
    {
      "epoch": 2.6645239051828042,
      "grad_norm": 0.45345714688301086,
      "learning_rate": 3.666018241520336e-07,
      "loss": 0.7535,
      "step": 1105
    },
    {
      "epoch": 2.6669345118521495,
      "grad_norm": 0.4342009723186493,
      "learning_rate": 3.61334685038951e-07,
      "loss": 0.7769,
      "step": 1106
    },
    {
      "epoch": 2.6693451185214947,
      "grad_norm": 0.37186241149902344,
      "learning_rate": 3.56104239173487e-07,
      "loss": 0.6666,
      "step": 1107
    },
    {
      "epoch": 2.67175572519084,
      "grad_norm": 0.4058228135108948,
      "learning_rate": 3.509105279300529e-07,
      "loss": 0.7415,
      "step": 1108
    },
    {
      "epoch": 2.6741663318601847,
      "grad_norm": 0.44539380073547363,
      "learning_rate": 3.457535923924782e-07,
      "loss": 0.8494,
      "step": 1109
    },
    {
      "epoch": 2.67657693852953,
      "grad_norm": 0.40209490060806274,
      "learning_rate": 3.406334733536842e-07,
      "loss": 0.715,
      "step": 1110
    },
    {
      "epoch": 2.678987545198875,
      "grad_norm": 0.42924532294273376,
      "learning_rate": 3.355502113153658e-07,
      "loss": 0.7855,
      "step": 1111
    },
    {
      "epoch": 2.68139815186822,
      "grad_norm": 0.4147682785987854,
      "learning_rate": 3.305038464876614e-07,
      "loss": 0.7437,
      "step": 1112
    },
    {
      "epoch": 2.683808758537565,
      "grad_norm": 0.39267468452453613,
      "learning_rate": 3.254944187888481e-07,
      "loss": 0.6857,
      "step": 1113
    },
    {
      "epoch": 2.6862193652069104,
      "grad_norm": 0.4079437553882599,
      "learning_rate": 3.2052196784501553e-07,
      "loss": 0.793,
      "step": 1114
    },
    {
      "epoch": 2.6886299718762556,
      "grad_norm": 0.43800613284111023,
      "learning_rate": 3.1558653298975615e-07,
      "loss": 0.7174,
      "step": 1115
    },
    {
      "epoch": 2.691040578545601,
      "grad_norm": 0.41524577140808105,
      "learning_rate": 3.106881532638556e-07,
      "loss": 0.7898,
      "step": 1116
    },
    {
      "epoch": 2.6934511852149456,
      "grad_norm": 0.43179449439048767,
      "learning_rate": 3.058268674149806e-07,
      "loss": 0.7416,
      "step": 1117
    },
    {
      "epoch": 2.695861791884291,
      "grad_norm": 0.42904040217399597,
      "learning_rate": 3.0100271389737365e-07,
      "loss": 0.7395,
      "step": 1118
    },
    {
      "epoch": 2.698272398553636,
      "grad_norm": 0.44881823658943176,
      "learning_rate": 2.9621573087155297e-07,
      "loss": 0.7253,
      "step": 1119
    },
    {
      "epoch": 2.700683005222981,
      "grad_norm": 0.508668839931488,
      "learning_rate": 2.9146595620400063e-07,
      "loss": 0.7904,
      "step": 1120
    },
    {
      "epoch": 2.703093611892326,
      "grad_norm": 0.3844849467277527,
      "learning_rate": 2.867534274668765e-07,
      "loss": 0.6584,
      "step": 1121
    },
    {
      "epoch": 2.7055042185616713,
      "grad_norm": 0.4445479214191437,
      "learning_rate": 2.820781819377066e-07,
      "loss": 0.8885,
      "step": 1122
    },
    {
      "epoch": 2.7079148252310166,
      "grad_norm": 0.3862084448337555,
      "learning_rate": 2.774402565991019e-07,
      "loss": 0.5594,
      "step": 1123
    },
    {
      "epoch": 2.710325431900362,
      "grad_norm": 0.4360179901123047,
      "learning_rate": 2.728396881384554e-07,
      "loss": 0.8483,
      "step": 1124
    },
    {
      "epoch": 2.7127360385697066,
      "grad_norm": 0.39546507596969604,
      "learning_rate": 2.682765129476578e-07,
      "loss": 0.7768,
      "step": 1125
    },
    {
      "epoch": 2.715146645239052,
      "grad_norm": 0.4252803325653076,
      "learning_rate": 2.637507671228057e-07,
      "loss": 0.7311,
      "step": 1126
    },
    {
      "epoch": 2.717557251908397,
      "grad_norm": 0.4036024510860443,
      "learning_rate": 2.592624864639204e-07,
      "loss": 0.7896,
      "step": 1127
    },
    {
      "epoch": 2.719967858577742,
      "grad_norm": 0.3788321018218994,
      "learning_rate": 2.548117064746608e-07,
      "loss": 0.608,
      "step": 1128
    },
    {
      "epoch": 2.722378465247087,
      "grad_norm": 0.4848822057247162,
      "learning_rate": 2.503984623620459e-07,
      "loss": 0.8314,
      "step": 1129
    },
    {
      "epoch": 2.7247890719164323,
      "grad_norm": 0.4506436288356781,
      "learning_rate": 2.4602278903617104e-07,
      "loss": 0.7435,
      "step": 1130
    },
    {
      "epoch": 2.7271996785857775,
      "grad_norm": 0.4042951166629791,
      "learning_rate": 2.416847211099393e-07,
      "loss": 0.7064,
      "step": 1131
    },
    {
      "epoch": 2.7296102852551227,
      "grad_norm": 0.39260706305503845,
      "learning_rate": 2.3738429289878185e-07,
      "loss": 0.6931,
      "step": 1132
    },
    {
      "epoch": 2.7320208919244675,
      "grad_norm": 0.42424336075782776,
      "learning_rate": 2.3312153842038775e-07,
      "loss": 0.8133,
      "step": 1133
    },
    {
      "epoch": 2.7344314985938127,
      "grad_norm": 0.40185147523880005,
      "learning_rate": 2.2889649139443637e-07,
      "loss": 0.6828,
      "step": 1134
    },
    {
      "epoch": 2.736842105263158,
      "grad_norm": 0.4371879994869232,
      "learning_rate": 2.2470918524232944e-07,
      "loss": 0.8165,
      "step": 1135
    },
    {
      "epoch": 2.7392527119325027,
      "grad_norm": 0.3777294158935547,
      "learning_rate": 2.2055965308692694e-07,
      "loss": 0.5916,
      "step": 1136
    },
    {
      "epoch": 2.741663318601848,
      "grad_norm": 0.4236375391483307,
      "learning_rate": 2.1644792775228462e-07,
      "loss": 0.8484,
      "step": 1137
    },
    {
      "epoch": 2.744073925271193,
      "grad_norm": 0.37342947721481323,
      "learning_rate": 2.123740417633946e-07,
      "loss": 0.7169,
      "step": 1138
    },
    {
      "epoch": 2.7464845319405384,
      "grad_norm": 0.41240277886390686,
      "learning_rate": 2.083380273459318e-07,
      "loss": 0.7049,
      "step": 1139
    },
    {
      "epoch": 2.7488951386098837,
      "grad_norm": 0.45906880497932434,
      "learning_rate": 2.0433991642598904e-07,
      "loss": 0.8,
      "step": 1140
    },
    {
      "epoch": 2.7513057452792284,
      "grad_norm": 0.3875437378883362,
      "learning_rate": 2.003797406298369e-07,
      "loss": 0.6369,
      "step": 1141
    },
    {
      "epoch": 2.7537163519485737,
      "grad_norm": 0.43214669823646545,
      "learning_rate": 1.964575312836653e-07,
      "loss": 0.7569,
      "step": 1142
    },
    {
      "epoch": 2.756126958617919,
      "grad_norm": 0.4286872446537018,
      "learning_rate": 1.9257331941333723e-07,
      "loss": 0.7256,
      "step": 1143
    },
    {
      "epoch": 2.758537565287264,
      "grad_norm": 0.42117825150489807,
      "learning_rate": 1.8872713574414614e-07,
      "loss": 0.7668,
      "step": 1144
    },
    {
      "epoch": 2.760948171956609,
      "grad_norm": 0.4113220274448395,
      "learning_rate": 1.8491901070056883e-07,
      "loss": 0.7688,
      "step": 1145
    },
    {
      "epoch": 2.763358778625954,
      "grad_norm": 0.38226398825645447,
      "learning_rate": 1.8114897440602796e-07,
      "loss": 0.7198,
      "step": 1146
    },
    {
      "epoch": 2.7657693852952994,
      "grad_norm": 0.4174339175224304,
      "learning_rate": 1.7741705668265387e-07,
      "loss": 0.704,
      "step": 1147
    },
    {
      "epoch": 2.7681799919646446,
      "grad_norm": 0.4335969090461731,
      "learning_rate": 1.7372328705104356e-07,
      "loss": 0.7753,
      "step": 1148
    },
    {
      "epoch": 2.77059059863399,
      "grad_norm": 0.3932461142539978,
      "learning_rate": 1.7006769473003614e-07,
      "loss": 0.6644,
      "step": 1149
    },
    {
      "epoch": 2.7730012053033346,
      "grad_norm": 0.3866373598575592,
      "learning_rate": 1.6645030863647217e-07,
      "loss": 0.6912,
      "step": 1150
    },
    {
      "epoch": 2.77541181197268,
      "grad_norm": 0.4320269227027893,
      "learning_rate": 1.6287115738497228e-07,
      "loss": 0.8269,
      "step": 1151
    },
    {
      "epoch": 2.777822418642025,
      "grad_norm": 0.3798907697200775,
      "learning_rate": 1.593302692877069e-07,
      "loss": 0.6695,
      "step": 1152
    },
    {
      "epoch": 2.78023302531137,
      "grad_norm": 0.41768330335617065,
      "learning_rate": 1.558276723541735e-07,
      "loss": 0.8033,
      "step": 1153
    },
    {
      "epoch": 2.782643631980715,
      "grad_norm": 0.4211842119693756,
      "learning_rate": 1.5236339429097413e-07,
      "loss": 0.8161,
      "step": 1154
    },
    {
      "epoch": 2.7850542386500603,
      "grad_norm": 0.3979291319847107,
      "learning_rate": 1.4893746250159768e-07,
      "loss": 0.6851,
      "step": 1155
    },
    {
      "epoch": 2.7874648453194055,
      "grad_norm": 0.4146570861339569,
      "learning_rate": 1.4554990408620185e-07,
      "loss": 0.7762,
      "step": 1156
    },
    {
      "epoch": 2.7898754519887508,
      "grad_norm": 0.3770717680454254,
      "learning_rate": 1.4220074584140152e-07,
      "loss": 0.7212,
      "step": 1157
    },
    {
      "epoch": 2.7922860586580955,
      "grad_norm": 0.4211629331111908,
      "learning_rate": 1.3889001426005134e-07,
      "loss": 0.7177,
      "step": 1158
    },
    {
      "epoch": 2.7946966653274408,
      "grad_norm": 0.38002777099609375,
      "learning_rate": 1.3561773553104006e-07,
      "loss": 0.6302,
      "step": 1159
    },
    {
      "epoch": 2.797107271996786,
      "grad_norm": 0.46824517846107483,
      "learning_rate": 1.3238393553908602e-07,
      "loss": 0.813,
      "step": 1160
    },
    {
      "epoch": 2.7995178786661308,
      "grad_norm": 0.40996015071868896,
      "learning_rate": 1.2918863986452422e-07,
      "loss": 0.7255,
      "step": 1161
    },
    {
      "epoch": 2.801928485335476,
      "grad_norm": 0.44880595803260803,
      "learning_rate": 1.2603187378311342e-07,
      "loss": 0.8319,
      "step": 1162
    },
    {
      "epoch": 2.8043390920048212,
      "grad_norm": 0.37107914686203003,
      "learning_rate": 1.2291366226582724e-07,
      "loss": 0.6972,
      "step": 1163
    },
    {
      "epoch": 2.8067496986741665,
      "grad_norm": 0.41691240668296814,
      "learning_rate": 1.1983402997866434e-07,
      "loss": 0.7689,
      "step": 1164
    },
    {
      "epoch": 2.8091603053435117,
      "grad_norm": 0.4303281605243683,
      "learning_rate": 1.167930012824492e-07,
      "loss": 0.778,
      "step": 1165
    },
    {
      "epoch": 2.8115709120128565,
      "grad_norm": 0.3736268877983093,
      "learning_rate": 1.1379060023263999e-07,
      "loss": 0.6211,
      "step": 1166
    },
    {
      "epoch": 2.8139815186822017,
      "grad_norm": 0.4501170217990875,
      "learning_rate": 1.1082685057913711e-07,
      "loss": 0.7245,
      "step": 1167
    },
    {
      "epoch": 2.816392125351547,
      "grad_norm": 0.44469213485717773,
      "learning_rate": 1.0790177576609939e-07,
      "loss": 0.8476,
      "step": 1168
    },
    {
      "epoch": 2.8188027320208917,
      "grad_norm": 0.38670194149017334,
      "learning_rate": 1.0501539893175316e-07,
      "loss": 0.6396,
      "step": 1169
    },
    {
      "epoch": 2.821213338690237,
      "grad_norm": 0.4098925292491913,
      "learning_rate": 1.0216774290821518e-07,
      "loss": 0.6748,
      "step": 1170
    },
    {
      "epoch": 2.823623945359582,
      "grad_norm": 0.4458259642124176,
      "learning_rate": 9.935883022130554e-08,
      "loss": 0.7751,
      "step": 1171
    },
    {
      "epoch": 2.8260345520289274,
      "grad_norm": 0.40821295976638794,
      "learning_rate": 9.658868309037506e-08,
      "loss": 0.8317,
      "step": 1172
    },
    {
      "epoch": 2.8284451586982726,
      "grad_norm": 0.3658338189125061,
      "learning_rate": 9.385732342812703e-08,
      "loss": 0.6081,
      "step": 1173
    },
    {
      "epoch": 2.8308557653676174,
      "grad_norm": 0.4430789053440094,
      "learning_rate": 9.116477284044245e-08,
      "loss": 0.7368,
      "step": 1174
    },
    {
      "epoch": 2.8332663720369626,
      "grad_norm": 0.42784419655799866,
      "learning_rate": 8.85110526262134e-08,
      "loss": 0.7206,
      "step": 1175
    },
    {
      "epoch": 2.835676978706308,
      "grad_norm": 0.4028724133968353,
      "learning_rate": 8.589618377716934e-08,
      "loss": 0.7697,
      "step": 1176
    },
    {
      "epoch": 2.8380875853756526,
      "grad_norm": 0.38379377126693726,
      "learning_rate": 8.33201869777156e-08,
      "loss": 0.711,
      "step": 1177
    },
    {
      "epoch": 2.840498192044998,
      "grad_norm": 0.41850024461746216,
      "learning_rate": 8.078308260476675e-08,
      "loss": 0.7407,
      "step": 1178
    },
    {
      "epoch": 2.842908798714343,
      "grad_norm": 0.41380420327186584,
      "learning_rate": 7.828489072758682e-08,
      "loss": 0.7465,
      "step": 1179
    },
    {
      "epoch": 2.8453194053836883,
      "grad_norm": 0.4405228793621063,
      "learning_rate": 7.582563110763108e-08,
      "loss": 0.8706,
      "step": 1180
    },
    {
      "epoch": 2.8477300120530336,
      "grad_norm": 0.36811959743499756,
      "learning_rate": 7.340532319838611e-08,
      "loss": 0.6122,
      "step": 1181
    },
    {
      "epoch": 2.8501406187223783,
      "grad_norm": 0.4712268114089966,
      "learning_rate": 7.102398614522221e-08,
      "loss": 0.8492,
      "step": 1182
    },
    {
      "epoch": 2.8525512253917236,
      "grad_norm": 0.417009174823761,
      "learning_rate": 6.868163878523626e-08,
      "loss": 0.6338,
      "step": 1183
    },
    {
      "epoch": 2.854961832061069,
      "grad_norm": 0.40389665961265564,
      "learning_rate": 6.637829964710574e-08,
      "loss": 0.7407,
      "step": 1184
    },
    {
      "epoch": 2.8573724387304136,
      "grad_norm": 0.4020982086658478,
      "learning_rate": 6.411398695093995e-08,
      "loss": 0.78,
      "step": 1185
    },
    {
      "epoch": 2.859783045399759,
      "grad_norm": 0.39244982600212097,
      "learning_rate": 6.18887186081385e-08,
      "loss": 0.7395,
      "step": 1186
    },
    {
      "epoch": 2.862193652069104,
      "grad_norm": 0.4252009391784668,
      "learning_rate": 5.970251222124802e-08,
      "loss": 0.6954,
      "step": 1187
    },
    {
      "epoch": 2.8646042587384493,
      "grad_norm": 0.5251843333244324,
      "learning_rate": 5.7555385083823964e-08,
      "loss": 0.7877,
      "step": 1188
    },
    {
      "epoch": 2.8670148654077945,
      "grad_norm": 0.43413203954696655,
      "learning_rate": 5.544735418029079e-08,
      "loss": 0.6972,
      "step": 1189
    },
    {
      "epoch": 2.8694254720771393,
      "grad_norm": 0.43542006611824036,
      "learning_rate": 5.3378436185813065e-08,
      "loss": 0.7352,
      "step": 1190
    },
    {
      "epoch": 2.8718360787464845,
      "grad_norm": 0.4059699773788452,
      "learning_rate": 5.134864746615786e-08,
      "loss": 0.6569,
      "step": 1191
    },
    {
      "epoch": 2.8742466854158297,
      "grad_norm": 0.3778529763221741,
      "learning_rate": 4.9358004077568186e-08,
      "loss": 0.7266,
      "step": 1192
    },
    {
      "epoch": 2.8766572920851745,
      "grad_norm": 0.4311237633228302,
      "learning_rate": 4.7406521766637515e-08,
      "loss": 0.7329,
      "step": 1193
    },
    {
      "epoch": 2.8790678987545197,
      "grad_norm": 0.44225457310676575,
      "learning_rate": 4.5494215970181555e-08,
      "loss": 0.8012,
      "step": 1194
    },
    {
      "epoch": 2.881478505423865,
      "grad_norm": 0.42106395959854126,
      "learning_rate": 4.3621101815117804e-08,
      "loss": 0.6241,
      "step": 1195
    },
    {
      "epoch": 2.88388911209321,
      "grad_norm": 0.43901902437210083,
      "learning_rate": 4.178719411834675e-08,
      "loss": 0.8404,
      "step": 1196
    },
    {
      "epoch": 2.8862997187625554,
      "grad_norm": 0.3896089494228363,
      "learning_rate": 3.9992507386633626e-08,
      "loss": 0.6031,
      "step": 1197
    },
    {
      "epoch": 2.8887103254319,
      "grad_norm": 0.4170692563056946,
      "learning_rate": 3.8237055816495174e-08,
      "loss": 0.7202,
      "step": 1198
    },
    {
      "epoch": 2.8911209321012454,
      "grad_norm": 0.4366650879383087,
      "learning_rate": 3.652085329408306e-08,
      "loss": 0.7928,
      "step": 1199
    },
    {
      "epoch": 2.8935315387705907,
      "grad_norm": 0.4156922996044159,
      "learning_rate": 3.48439133950812e-08,
      "loss": 0.7654,
      "step": 1200
    },
    {
      "epoch": 2.8959421454399354,
      "grad_norm": 0.3936680257320404,
      "learning_rate": 3.3206249384590826e-08,
      "loss": 0.7042,
      "step": 1201
    },
    {
      "epoch": 2.8983527521092807,
      "grad_norm": 0.4136912226676941,
      "learning_rate": 3.1607874217030045e-08,
      "loss": 0.7204,
      "step": 1202
    },
    {
      "epoch": 2.900763358778626,
      "grad_norm": 0.4260861277580261,
      "learning_rate": 3.0048800536031654e-08,
      "loss": 0.7653,
      "step": 1203
    },
    {
      "epoch": 2.903173965447971,
      "grad_norm": 0.4011975824832916,
      "learning_rate": 2.8529040674339926e-08,
      "loss": 0.7589,
      "step": 1204
    },
    {
      "epoch": 2.9055845721173164,
      "grad_norm": 0.3969016373157501,
      "learning_rate": 2.704860665371567e-08,
      "loss": 0.6809,
      "step": 1205
    },
    {
      "epoch": 2.9079951787866616,
      "grad_norm": 0.41906648874282837,
      "learning_rate": 2.5607510184840757e-08,
      "loss": 0.7514,
      "step": 1206
    },
    {
      "epoch": 2.9104057854560064,
      "grad_norm": 0.3886389434337616,
      "learning_rate": 2.4205762667223743e-08,
      "loss": 0.7422,
      "step": 1207
    },
    {
      "epoch": 2.9128163921253516,
      "grad_norm": 0.4247673749923706,
      "learning_rate": 2.2843375189113835e-08,
      "loss": 0.7625,
      "step": 1208
    },
    {
      "epoch": 2.915226998794697,
      "grad_norm": 0.41922637820243835,
      "learning_rate": 2.152035852740708e-08,
      "loss": 0.6691,
      "step": 1209
    },
    {
      "epoch": 2.9176376054640416,
      "grad_norm": 0.4272564649581909,
      "learning_rate": 2.0236723147566972e-08,
      "loss": 0.7665,
      "step": 1210
    },
    {
      "epoch": 2.920048212133387,
      "grad_norm": 0.3958934247493744,
      "learning_rate": 1.8992479203537305e-08,
      "loss": 0.7214,
      "step": 1211
    },
    {
      "epoch": 2.922458818802732,
      "grad_norm": 0.42744383215904236,
      "learning_rate": 1.778763653766502e-08,
      "loss": 0.7564,
      "step": 1212
    },
    {
      "epoch": 2.9248694254720773,
      "grad_norm": 0.4064633548259735,
      "learning_rate": 1.662220468062081e-08,
      "loss": 0.7692,
      "step": 1213
    },
    {
      "epoch": 2.9272800321414225,
      "grad_norm": 0.3730533719062805,
      "learning_rate": 1.549619285132309e-08,
      "loss": 0.7338,
      "step": 1214
    },
    {
      "epoch": 2.9296906388107673,
      "grad_norm": 0.3945830762386322,
      "learning_rate": 1.440960995686691e-08,
      "loss": 0.7336,
      "step": 1215
    },
    {
      "epoch": 2.9321012454801125,
      "grad_norm": 0.44148439168930054,
      "learning_rate": 1.336246459245183e-08,
      "loss": 0.7758,
      "step": 1216
    },
    {
      "epoch": 2.9345118521494578,
      "grad_norm": 0.3946518898010254,
      "learning_rate": 1.235476504131472e-08,
      "loss": 0.6989,
      "step": 1217
    },
    {
      "epoch": 2.9369224588188025,
      "grad_norm": 0.3923145830631256,
      "learning_rate": 1.1386519274664831e-08,
      "loss": 0.6836,
      "step": 1218
    },
    {
      "epoch": 2.9393330654881478,
      "grad_norm": 0.4212956726551056,
      "learning_rate": 1.0457734951618282e-08,
      "loss": 0.7662,
      "step": 1219
    },
    {
      "epoch": 2.941743672157493,
      "grad_norm": 0.43960368633270264,
      "learning_rate": 9.568419419141995e-09,
      "loss": 0.7636,
      "step": 1220
    },
    {
      "epoch": 2.9441542788268382,
      "grad_norm": 0.4093663692474365,
      "learning_rate": 8.71857971198986e-09,
      "loss": 0.7587,
      "step": 1221
    },
    {
      "epoch": 2.9465648854961835,
      "grad_norm": 0.4116091728210449,
      "learning_rate": 7.908222552651667e-09,
      "loss": 0.7228,
      "step": 1222
    },
    {
      "epoch": 2.9489754921655282,
      "grad_norm": 0.41725078225135803,
      "learning_rate": 7.137354351298698e-09,
      "loss": 0.785,
      "step": 1223
    },
    {
      "epoch": 2.9513860988348735,
      "grad_norm": 0.4339212477207184,
      "learning_rate": 6.405981205730994e-09,
      "loss": 0.6493,
      "step": 1224
    },
    {
      "epoch": 2.9537967055042187,
      "grad_norm": 0.4500887393951416,
      "learning_rate": 5.7141089013318384e-09,
      "loss": 0.7793,
      "step": 1225
    },
    {
      "epoch": 2.9562073121735635,
      "grad_norm": 0.3880206346511841,
      "learning_rate": 5.061742911021683e-09,
      "loss": 0.7468,
      "step": 1226
    },
    {
      "epoch": 2.9586179188429087,
      "grad_norm": 0.4332124590873718,
      "learning_rate": 4.448888395211515e-09,
      "loss": 0.7158,
      "step": 1227
    },
    {
      "epoch": 2.961028525512254,
      "grad_norm": 0.4175040125846863,
      "learning_rate": 3.875550201767331e-09,
      "loss": 0.7647,
      "step": 1228
    },
    {
      "epoch": 2.963439132181599,
      "grad_norm": 0.400880366563797,
      "learning_rate": 3.3417328659673952e-09,
      "loss": 0.703,
      "step": 1229
    },
    {
      "epoch": 2.9658497388509444,
      "grad_norm": 0.4085295796394348,
      "learning_rate": 2.8474406104689323e-09,
      "loss": 0.7268,
      "step": 1230
    },
    {
      "epoch": 2.968260345520289,
      "grad_norm": 0.4471244215965271,
      "learning_rate": 2.3926773452731534e-09,
      "loss": 0.7511,
      "step": 1231
    },
    {
      "epoch": 2.9706709521896344,
      "grad_norm": 0.42770814895629883,
      "learning_rate": 1.977446667695282e-09,
      "loss": 0.7345,
      "step": 1232
    },
    {
      "epoch": 2.9730815588589796,
      "grad_norm": 0.4185887277126312,
      "learning_rate": 1.6017518623356876e-09,
      "loss": 0.7618,
      "step": 1233
    },
    {
      "epoch": 2.9754921655283244,
      "grad_norm": 0.4077529311180115,
      "learning_rate": 1.2655959010537954e-09,
      "loss": 0.6774,
      "step": 1234
    },
    {
      "epoch": 2.9779027721976696,
      "grad_norm": 0.4341370463371277,
      "learning_rate": 9.68981442945327e-10,
      "loss": 0.8016,
      "step": 1235
    },
    {
      "epoch": 2.980313378867015,
      "grad_norm": 0.4078274369239807,
      "learning_rate": 7.119108343206505e-10,
      "loss": 0.6871,
      "step": 1236
    },
    {
      "epoch": 2.98272398553636,
      "grad_norm": 0.41902077198028564,
      "learning_rate": 4.943861086864621e-10,
      "loss": 0.778,
      "step": 1237
    },
    {
      "epoch": 2.9851345922057053,
      "grad_norm": 0.46550217270851135,
      "learning_rate": 3.1640898672802287e-10,
      "loss": 0.7477,
      "step": 1238
    },
    {
      "epoch": 2.98754519887505,
      "grad_norm": 0.4195660948753357,
      "learning_rate": 1.779808762997215e-10,
      "loss": 0.7426,
      "step": 1239
    },
    {
      "epoch": 2.9899558055443953,
      "grad_norm": 0.4319990873336792,
      "learning_rate": 7.910287240842085e-11,
      "loss": 0.7368,
      "step": 1240
    },
    {
      "epoch": 2.9923664122137406,
      "grad_norm": 0.39619678258895874,
      "learning_rate": 1.9775757210127588e-11,
      "loss": 0.701,
      "step": 1241
    },
    {
      "epoch": 2.9947770188830853,
      "grad_norm": 0.3986462950706482,
      "learning_rate": 0.0,
      "loss": 0.7199,
      "step": 1242
    },
    {
      "epoch": 2.9947770188830853,
      "step": 1242,
      "total_flos": 1071869892476928.0,
      "train_loss": 0.8010726735691709,
      "train_runtime": 59534.8116,
      "train_samples_per_second": 2.007,
      "train_steps_per_second": 0.021
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 1242,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1071869892476928.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}