{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.004, "grad_norm": 6.988942623138428, "learning_rate": 1.3333333333333336e-07, "loss": 1.2845, "step": 1},
    {"epoch": 0.008, "grad_norm": 6.956341743469238, "learning_rate": 2.666666666666667e-07, "loss": 1.2808, "step": 2},
    {"epoch": 0.012, "grad_norm": 6.937110900878906, "learning_rate": 4.0000000000000003e-07, "loss": 1.2611, "step": 3},
    {"epoch": 0.016, "grad_norm": 6.7839250564575195, "learning_rate": 5.333333333333335e-07, "loss": 1.2732, "step": 4},
    {"epoch": 0.02, "grad_norm": 6.882997989654541, "learning_rate": 6.666666666666667e-07, "loss": 1.2664, "step": 5},
    {"epoch": 0.024, "grad_norm": 6.942823886871338, "learning_rate": 8.000000000000001e-07, "loss": 1.268, "step": 6},
    {"epoch": 0.028, "grad_norm": 6.420243740081787, "learning_rate": 9.333333333333334e-07, "loss": 1.2577, "step": 7},
    {"epoch": 0.032, "grad_norm": 6.184328556060791, "learning_rate": 1.066666666666667e-06, "loss": 1.2439, "step": 8},
    {"epoch": 0.036, "grad_norm": 6.088969707489014, "learning_rate": 1.2000000000000002e-06, "loss": 1.2432, "step": 9},
    {"epoch": 0.04, "grad_norm": 4.597912788391113, "learning_rate": 1.3333333333333334e-06, "loss": 1.1761, "step": 10},
    {"epoch": 0.044, "grad_norm": 4.545207500457764, "learning_rate": 1.4666666666666669e-06, "loss": 1.1907, "step": 11},
    {"epoch": 0.048, "grad_norm": 4.477672100067139, "learning_rate": 1.6000000000000001e-06, "loss": 1.1929, "step": 12},
    {"epoch": 0.052, "grad_norm": 2.8524184226989746, "learning_rate": 1.7333333333333336e-06, "loss": 1.1881, "step": 13},
    {"epoch": 0.056, "grad_norm": 2.8725757598876953, "learning_rate": 1.8666666666666669e-06, "loss": 1.1643, "step": 14},
    {"epoch": 0.06, "grad_norm": 2.601120948791504, "learning_rate": 2.0000000000000003e-06, "loss": 1.1545, "step": 15},
    {"epoch": 0.064, "grad_norm": 2.457542657852173, "learning_rate": 2.133333333333334e-06, "loss": 1.1581, "step": 16},
    {"epoch": 0.068, "grad_norm": 2.867591381072998, "learning_rate": 2.266666666666667e-06, "loss": 1.1464, "step": 17},
    {"epoch": 0.072, "grad_norm": 3.6930630207061768, "learning_rate": 2.4000000000000003e-06, "loss": 1.1216, "step": 18},
    {"epoch": 0.076, "grad_norm": 3.9270849227905273, "learning_rate": 2.5333333333333338e-06, "loss": 1.1291, "step": 19},
    {"epoch": 0.08, "grad_norm": 3.724717140197754, "learning_rate": 2.666666666666667e-06, "loss": 1.1498, "step": 20},
    {"epoch": 0.084, "grad_norm": 3.2976064682006836, "learning_rate": 2.8000000000000003e-06, "loss": 1.1392, "step": 21},
    {"epoch": 0.088, "grad_norm": 2.683112382888794, "learning_rate": 2.9333333333333338e-06, "loss": 1.0711, "step": 22},
    {"epoch": 0.092, "grad_norm": 2.0106770992279053, "learning_rate": 3.066666666666667e-06, "loss": 1.0696, "step": 23},
    {"epoch": 0.096, "grad_norm": 1.7597943544387817, "learning_rate": 3.2000000000000003e-06, "loss": 1.0776, "step": 24},
    {"epoch": 0.1, "grad_norm": 1.7341334819793701, "learning_rate": 3.3333333333333333e-06, "loss": 1.0791, "step": 25},
    {"epoch": 0.104, "grad_norm": 1.6312559843063354, "learning_rate": 3.4666666666666672e-06, "loss": 1.0203, "step": 26},
    {"epoch": 0.108, "grad_norm": 1.4349817037582397, "learning_rate": 3.6000000000000003e-06, "loss": 1.0466, "step": 27},
    {"epoch": 0.112, "grad_norm": 1.2373616695404053, "learning_rate": 3.7333333333333337e-06, "loss": 1.075, "step": 28},
    {"epoch": 0.116, "grad_norm": 1.2284890413284302, "learning_rate": 3.866666666666667e-06, "loss": 1.0127, "step": 29},
    {"epoch": 0.12, "grad_norm": 0.9730978608131409, "learning_rate": 4.000000000000001e-06, "loss": 1.0196, "step": 30},
    {"epoch": 0.124, "grad_norm": 0.9298821687698364, "learning_rate": 4.133333333333333e-06, "loss": 1.0085, "step": 31},
    {"epoch": 0.128, "grad_norm": 0.9621558785438538, "learning_rate": 4.266666666666668e-06, "loss": 0.974, "step": 32},
    {"epoch": 0.132, "grad_norm": 1.032867193222046, "learning_rate": 4.4e-06, "loss": 1.0066, "step": 33},
    {"epoch": 0.136, "grad_norm": 0.9056654572486877, "learning_rate": 4.533333333333334e-06, "loss": 1.007, "step": 34},
    {"epoch": 0.14, "grad_norm": 0.8527935147285461, "learning_rate": 4.666666666666667e-06, "loss": 0.9797, "step": 35},
    {"epoch": 0.144, "grad_norm": 0.7919298410415649, "learning_rate": 4.800000000000001e-06, "loss": 0.944, "step": 36},
    {"epoch": 0.148, "grad_norm": 0.8432771563529968, "learning_rate": 4.933333333333334e-06, "loss": 0.9859, "step": 37},
    {"epoch": 0.152, "grad_norm": 0.9072924256324768, "learning_rate": 5.0666666666666676e-06, "loss": 0.9217, "step": 38},
    {"epoch": 0.156, "grad_norm": 0.9000473618507385, "learning_rate": 5.2e-06, "loss": 1.0214, "step": 39},
    {"epoch": 0.16, "grad_norm": 0.8418648838996887, "learning_rate": 5.333333333333334e-06, "loss": 0.9908, "step": 40},
    {"epoch": 0.164, "grad_norm": 0.8197311758995056, "learning_rate": 5.466666666666667e-06, "loss": 0.9556, "step": 41},
    {"epoch": 0.168, "grad_norm": 0.7588638663291931, "learning_rate": 5.600000000000001e-06, "loss": 0.9895, "step": 42},
    {"epoch": 0.172, "grad_norm": 0.780891478061676, "learning_rate": 5.733333333333334e-06, "loss": 0.9436, "step": 43},
    {"epoch": 0.176, "grad_norm": 0.8387202620506287, "learning_rate": 5.8666666666666675e-06, "loss": 0.9396, "step": 44},
    {"epoch": 0.18, "grad_norm": 0.7959762811660767, "learning_rate": 6e-06, "loss": 0.9163, "step": 45},
    {"epoch": 0.184, "grad_norm": 0.7720610499382019, "learning_rate": 6.133333333333334e-06, "loss": 0.9423, "step": 46},
    {"epoch": 0.188, "grad_norm": 0.8092529773712158, "learning_rate": 6.266666666666668e-06, "loss": 0.9291, "step": 47},
    {"epoch": 0.192, "grad_norm": 0.6679025888442993, "learning_rate": 6.4000000000000006e-06, "loss": 0.9342, "step": 48},
    {"epoch": 0.196, "grad_norm": 0.6735404133796692, "learning_rate": 6.533333333333334e-06, "loss": 0.9692, "step": 49},
    {"epoch": 0.2, "grad_norm": 0.6841786503791809, "learning_rate": 6.666666666666667e-06, "loss": 0.9217, "step": 50},
    {"epoch": 0.204, "grad_norm": 0.6928501725196838, "learning_rate": 6.800000000000001e-06, "loss": 0.9415, "step": 51},
    {"epoch": 0.208, "grad_norm": 0.6858928203582764, "learning_rate": 6.9333333333333344e-06, "loss": 0.9221, "step": 52},
    {"epoch": 0.212, "grad_norm": 0.6531127691268921, "learning_rate": 7.066666666666667e-06, "loss": 0.9241, "step": 53},
    {"epoch": 0.216, "grad_norm": 0.7553710341453552, "learning_rate": 7.2000000000000005e-06, "loss": 0.938, "step": 54},
    {"epoch": 0.22, "grad_norm": 0.8223641514778137, "learning_rate": 7.333333333333333e-06, "loss": 0.9446, "step": 55},
    {"epoch": 0.224, "grad_norm": 0.7550656795501709, "learning_rate": 7.4666666666666675e-06, "loss": 0.9678, "step": 56},
    {"epoch": 0.228, "grad_norm": 0.8396725058555603, "learning_rate": 7.600000000000001e-06, "loss": 0.9881, "step": 57},
    {"epoch": 0.232, "grad_norm": 0.7557951807975769, "learning_rate": 7.733333333333334e-06, "loss": 0.8889, "step": 58},
    {"epoch": 0.236, "grad_norm": 1.0014610290527344, "learning_rate": 7.866666666666667e-06, "loss": 0.9487, "step": 59},
    {"epoch": 0.24, "grad_norm": 0.796205997467041, "learning_rate": 8.000000000000001e-06, "loss": 0.9578, "step": 60},
    {"epoch": 0.244, "grad_norm": 0.7757744789123535, "learning_rate": 8.133333333333334e-06, "loss": 0.8946, "step": 61},
    {"epoch": 0.248, "grad_norm": 0.8658983707427979, "learning_rate": 8.266666666666667e-06, "loss": 0.9212, "step": 62},
    {"epoch": 0.252, "grad_norm": 0.7084074020385742, "learning_rate": 8.400000000000001e-06, "loss": 0.9059, "step": 63},
    {"epoch": 0.256, "grad_norm": 0.7214030623435974, "learning_rate": 8.533333333333335e-06, "loss": 0.9252, "step": 64},
    {"epoch": 0.26, "grad_norm": 0.6902719736099243, "learning_rate": 8.666666666666668e-06, "loss": 0.9111, "step": 65},
    {"epoch": 0.264, "grad_norm": 0.8375234603881836, "learning_rate": 8.8e-06, "loss": 0.9215, "step": 66},
    {"epoch": 0.268, "grad_norm": 0.6496827602386475, "learning_rate": 8.933333333333333e-06, "loss": 0.9181, "step": 67},
    {"epoch": 0.272, "grad_norm": 0.7157948613166809, "learning_rate": 9.066666666666667e-06, "loss": 0.9102, "step": 68},
    {"epoch": 0.276, "grad_norm": 0.6876959800720215, "learning_rate": 9.200000000000002e-06, "loss": 0.9628, "step": 69},
    {"epoch": 0.28, "grad_norm": 0.8131017684936523, "learning_rate": 9.333333333333334e-06, "loss": 0.9093, "step": 70},
    {"epoch": 0.284, "grad_norm": 0.7312742471694946, "learning_rate": 9.466666666666667e-06, "loss": 0.9196, "step": 71},
    {"epoch": 0.288, "grad_norm": 0.6563302874565125, "learning_rate": 9.600000000000001e-06, "loss": 0.9103, "step": 72},
    {"epoch": 0.292, "grad_norm": 0.6630368828773499, "learning_rate": 9.733333333333334e-06, "loss": 0.912, "step": 73},
    {"epoch": 0.296, "grad_norm": 0.5970990657806396, "learning_rate": 9.866666666666668e-06, "loss": 0.9267, "step": 74},
    {"epoch": 0.3, "grad_norm": 0.676497220993042, "learning_rate": 1e-05, "loss": 0.9362, "step": 75},
    {"epoch": 0.304, "grad_norm": 0.9087813496589661, "learning_rate": 9.999945845889795e-06, "loss": 0.9044, "step": 76},
    {"epoch": 0.308, "grad_norm": 0.6283800601959229, "learning_rate": 9.999783384732242e-06, "loss": 0.9154, "step": 77},
    {"epoch": 0.312, "grad_norm": 0.7198421359062195, "learning_rate": 9.999512620046523e-06, "loss": 0.9165, "step": 78},
    {"epoch": 0.316, "grad_norm": 0.7074084877967834, "learning_rate": 9.99913355769784e-06, "loss": 0.888, "step": 79},
    {"epoch": 0.32, "grad_norm": 0.7027278542518616, "learning_rate": 9.99864620589731e-06, "loss": 0.893, "step": 80},
    {"epoch": 0.324, "grad_norm": 0.819341778755188, "learning_rate": 9.998050575201772e-06, "loss": 0.9172, "step": 81},
    {"epoch": 0.328, "grad_norm": 0.6771552562713623, "learning_rate": 9.99734667851357e-06, "loss": 0.92, "step": 82},
    {"epoch": 0.332, "grad_norm": 0.9496891498565674, "learning_rate": 9.99653453108026e-06, "loss": 0.9024, "step": 83},
    {"epoch": 0.336, "grad_norm": 0.6092897653579712, "learning_rate": 9.995614150494293e-06, "loss": 0.8922, "step": 84},
    {"epoch": 0.34, "grad_norm": 0.8747417330741882, "learning_rate": 9.994585556692624e-06, "loss": 0.8498, "step": 85},
    {"epoch": 0.344, "grad_norm": 0.788902759552002, "learning_rate": 9.993448771956285e-06, "loss": 0.8773, "step": 86},
    {"epoch": 0.348, "grad_norm": 0.8757255673408508, "learning_rate": 9.992203820909906e-06, "loss": 0.8891, "step": 87},
    {"epoch": 0.352, "grad_norm": 0.8785430788993835, "learning_rate": 9.99085073052117e-06, "loss": 0.8881, "step": 88},
    {"epoch": 0.356, "grad_norm": 0.7009172439575195, "learning_rate": 9.989389530100242e-06, "loss": 0.8782, "step": 89},
    {"epoch": 0.36, "grad_norm": 0.8035963177680969, "learning_rate": 9.987820251299121e-06, "loss": 0.8983, "step": 90},
    {"epoch": 0.364, "grad_norm": 0.6838966012001038, "learning_rate": 9.986142928110972e-06, "loss": 0.9172, "step": 91},
    {"epoch": 0.368, "grad_norm": 0.8380683660507202, "learning_rate": 9.984357596869369e-06, "loss": 0.9262, "step": 92},
    {"epoch": 0.372, "grad_norm": 0.6786786913871765, "learning_rate": 9.982464296247523e-06, "loss": 0.8963, "step": 93},
    {"epoch": 0.376, "grad_norm": 0.7757643461227417, "learning_rate": 9.980463067257437e-06, "loss": 0.8983, "step": 94},
    {"epoch": 0.38, "grad_norm": 0.647390604019165, "learning_rate": 9.978353953249023e-06, "loss": 0.9304, "step": 95},
    {"epoch": 0.384, "grad_norm": 0.7196109294891357, "learning_rate": 9.976136999909156e-06, "loss": 0.8958, "step": 96},
    {"epoch": 0.388, "grad_norm": 0.7190807461738586, "learning_rate": 9.973812255260692e-06, "loss": 0.8795, "step": 97},
    {"epoch": 0.392, "grad_norm": 0.738398551940918, "learning_rate": 9.971379769661422e-06, "loss": 0.9067, "step": 98},
    {"epoch": 0.396, "grad_norm": 0.767160177230835, "learning_rate": 9.968839595802982e-06, "loss": 0.9028, "step": 99},
    {"epoch": 0.4, "grad_norm": 0.5966362953186035, "learning_rate": 9.966191788709716e-06, "loss": 0.8738, "step": 100},
    {"epoch": 0.404, "grad_norm": 0.7598825693130493, "learning_rate": 9.963436405737476e-06, "loss": 0.9183, "step": 101},
    {"epoch": 0.408, "grad_norm": 0.7260900735855103, "learning_rate": 9.960573506572391e-06, "loss": 0.8845, "step": 102},
    {"epoch": 0.412, "grad_norm": 0.7225805521011353, "learning_rate": 9.957603153229559e-06, "loss": 0.8923, "step": 103},
    {"epoch": 0.416, "grad_norm": 0.7357990145683289, "learning_rate": 9.95452541005172e-06, "loss": 0.9223, "step": 104},
    {"epoch": 0.42, "grad_norm": 0.6201126575469971, "learning_rate": 9.951340343707852e-06, "loss": 0.9089, "step": 105},
    {"epoch": 0.424, "grad_norm": 0.7525957226753235, "learning_rate": 9.948048023191728e-06, "loss": 0.9297, "step": 106},
    {"epoch": 0.428, "grad_norm": 0.6546491384506226, "learning_rate": 9.94464851982042e-06, "loss": 0.8841, "step": 107},
    {"epoch": 0.432, "grad_norm": 0.6633215546607971, "learning_rate": 9.941141907232766e-06, "loss": 0.8413, "step": 108},
    {"epoch": 0.436, "grad_norm": 0.7645735740661621, "learning_rate": 9.937528261387753e-06, "loss": 0.8973, "step": 109},
    {"epoch": 0.44, "grad_norm": 0.6612620949745178, "learning_rate": 9.933807660562898e-06, "loss": 0.9139, "step": 110},
    {"epoch": 0.444, "grad_norm": 0.6729806661605835, "learning_rate": 9.929980185352525e-06, "loss": 0.8575, "step": 111},
    {"epoch": 0.448, "grad_norm": 0.5767675638198853, "learning_rate": 9.926045918666045e-06, "loss": 0.8975, "step": 112},
    {"epoch": 0.452, "grad_norm": 0.5622053146362305, "learning_rate": 9.92200494572614e-06, "loss": 0.8744, "step": 113},
    {"epoch": 0.456, "grad_norm": 0.5797794461250305, "learning_rate": 9.91785735406693e-06, "loss": 0.9071, "step": 114},
    {"epoch": 0.46, "grad_norm": 0.6670066714286804, "learning_rate": 9.913603233532067e-06, "loss": 0.8851, "step": 115},
    {"epoch": 0.464, "grad_norm": 0.5439545512199402, "learning_rate": 9.909242676272797e-06, "loss": 0.8859, "step": 116},
    {"epoch": 0.468, "grad_norm": 0.7104266881942749, "learning_rate": 9.904775776745959e-06, "loss": 0.9192, "step": 117},
    {"epoch": 0.472, "grad_norm": 0.6157470345497131, "learning_rate": 9.90020263171194e-06, "loss": 0.924, "step": 118},
    {"epoch": 0.476, "grad_norm": 0.6168134212493896, "learning_rate": 9.89552334023258e-06, "loss": 0.9017, "step": 119},
    {"epoch": 0.48, "grad_norm": 0.6102380752563477, "learning_rate": 9.890738003669029e-06, "loss": 0.885, "step": 120},
    {"epoch": 0.484, "grad_norm": 0.6744838953018188, "learning_rate": 9.88584672567954e-06, "loss": 0.8535, "step": 121},
    {"epoch": 0.488, "grad_norm": 0.6119304895401001, "learning_rate": 9.880849612217238e-06, "loss": 0.9136, "step": 122},
    {"epoch": 0.492, "grad_norm": 0.5440135598182678, "learning_rate": 9.875746771527817e-06, "loss": 0.891, "step": 123},
    {"epoch": 0.496, "grad_norm": 0.6523988842964172, "learning_rate": 9.870538314147194e-06, "loss": 0.8793, "step": 124},
    {"epoch": 0.5, "grad_norm": 0.7202532291412354, "learning_rate": 9.86522435289912e-06, "loss": 0.9114, "step": 125},
    {"epoch": 0.504, "grad_norm": 0.7352282404899597, "learning_rate": 9.859805002892733e-06, "loss": 0.925, "step": 126},
    {"epoch": 0.508, "grad_norm": 0.7045993804931641, "learning_rate": 9.85428038152006e-06, "loss": 0.9076, "step": 127},
    {"epoch": 0.512, "grad_norm": 0.6108631491661072, "learning_rate": 9.84865060845349e-06, "loss": 0.8609, "step": 128},
    {"epoch": 0.516, "grad_norm": 0.5544862747192383, "learning_rate": 9.842915805643156e-06, "loss": 0.8873, "step": 129},
    {"epoch": 0.52, "grad_norm": 0.6903448104858398, "learning_rate": 9.83707609731432e-06, "loss": 0.8593, "step": 130},
    {"epoch": 0.524, "grad_norm": 0.634729266166687, "learning_rate": 9.831131609964664e-06, "loss": 0.8927, "step": 131},
    {"epoch": 0.528, "grad_norm": 0.5572838187217712, "learning_rate": 9.825082472361558e-06, "loss": 0.8913, "step": 132},
    {"epoch": 0.532, "grad_norm": 0.690453827381134, "learning_rate": 9.818928815539266e-06, "loss": 0.8991, "step": 133},
    {"epoch": 0.536, "grad_norm": 0.6307182908058167, "learning_rate": 9.812670772796113e-06, "loss": 0.8736, "step": 134},
    {"epoch": 0.54, "grad_norm": 0.6966649889945984, "learning_rate": 9.806308479691595e-06, "loss": 0.9092, "step": 135},
    {"epoch": 0.544, "grad_norm": 0.6075841188430786, "learning_rate": 9.799842074043438e-06, "loss": 0.8942, "step": 136},
    {"epoch": 0.548, "grad_norm": 0.7378610968589783, "learning_rate": 9.793271695924621e-06, "loss": 0.9019, "step": 137},
    {"epoch": 0.552, "grad_norm": 0.5990914106369019, "learning_rate": 9.786597487660336e-06, "loss": 0.8997, "step": 138},
    {"epoch": 0.556, "grad_norm": 0.6251294016838074, "learning_rate": 9.779819593824909e-06, "loss": 0.902, "step": 139},
    {"epoch": 0.56, "grad_norm": 0.6688668727874756, "learning_rate": 9.77293816123866e-06, "loss": 0.9324, "step": 140},
    {"epoch": 0.564, "grad_norm": 0.5460559725761414, "learning_rate": 9.765953338964736e-06, "loss": 0.9258, "step": 141},
    {"epoch": 0.568, "grad_norm": 0.639219343662262, "learning_rate": 9.75886527830587e-06, "loss": 0.9035, "step": 142},
    {"epoch": 0.572, "grad_norm": 0.5945067405700684, "learning_rate": 9.751674132801106e-06, "loss": 0.8789, "step": 143},
    {"epoch": 0.576, "grad_norm": 0.5545593500137329, "learning_rate": 9.744380058222483e-06, "loss": 0.848, "step": 144},
    {"epoch": 0.58, "grad_norm": 0.7910006642341614, "learning_rate": 9.736983212571646e-06, "loss": 0.9007, "step": 145},
    {"epoch": 0.584, "grad_norm": 0.617174506187439, "learning_rate": 9.729483756076436e-06, "loss": 0.8592, "step": 146},
    {"epoch": 0.588, "grad_norm": 0.7085884809494019, "learning_rate": 9.721881851187406e-06, "loss": 0.8595, "step": 147},
    {"epoch": 0.592, "grad_norm": 0.7189193367958069, "learning_rate": 9.714177662574316e-06, "loss": 0.8635, "step": 148},
    {"epoch": 0.596, "grad_norm": 0.7401798367500305, "learning_rate": 9.70637135712256e-06, "loss": 0.9157, "step": 149},
    {"epoch": 0.6, "grad_norm": 0.7053728103637695, "learning_rate": 9.698463103929542e-06, "loss": 0.8875, "step": 150},
    {"epoch": 0.604, "grad_norm": 0.6575140953063965, "learning_rate": 9.690453074301035e-06, "loss": 0.8598, "step": 151},
    {"epoch": 0.608, "grad_norm": 0.6678581237792969, "learning_rate": 9.682341441747446e-06, "loss": 0.8933, "step": 152},
    {"epoch": 0.612, "grad_norm": 0.6995222568511963, "learning_rate": 9.674128381980073e-06, "loss": 0.8675, "step": 153},
    {"epoch": 0.616, "grad_norm": 0.6304057240486145, "learning_rate": 9.665814072907293e-06, "loss": 0.8784, "step": 154},
    {"epoch": 0.62, "grad_norm": 0.6518959403038025, "learning_rate": 9.657398694630713e-06, "loss": 0.8861, "step": 155},
    {"epoch": 0.624, "grad_norm": 0.7137309312820435, "learning_rate": 9.648882429441258e-06, "loss": 0.8722, "step": 156},
    {"epoch": 0.628, "grad_norm": 0.6826603412628174, "learning_rate": 9.640265461815235e-06, "loss": 0.9008, "step": 157},
    {"epoch": 0.632, "grad_norm": 0.5501178503036499, "learning_rate": 9.63154797841033e-06, "loss": 0.8751, "step": 158},
    {"epoch": 0.636, "grad_norm": 0.5834782719612122, "learning_rate": 9.622730168061568e-06, "loss": 0.8597, "step": 159},
    {"epoch": 0.64, "grad_norm": 0.5569977164268494, "learning_rate": 9.613812221777212e-06, "loss": 0.872, "step": 160},
    {"epoch": 0.644, "grad_norm": 0.619074821472168, "learning_rate": 9.604794332734647e-06, "loss": 0.8832, "step": 161},
    {"epoch": 0.648, "grad_norm": 0.5631194710731506, "learning_rate": 9.595676696276173e-06, "loss": 0.8768, "step": 162},
    {"epoch": 0.652, "grad_norm": 0.6887290477752686, "learning_rate": 9.586459509904786e-06, "loss": 0.8677, "step": 163},
    {"epoch": 0.656, "grad_norm": 0.6014657616615295, "learning_rate": 9.577142973279896e-06, "loss": 0.8617, "step": 164},
    {"epoch": 0.66, "grad_norm": 0.5356835722923279, "learning_rate": 9.567727288213005e-06, "loss": 0.8855, "step": 165},
    {"epoch": 0.664, "grad_norm": 0.7122846841812134, "learning_rate": 9.55821265866333e-06, "loss": 0.8652, "step": 166},
    {"epoch": 0.668, "grad_norm": 0.5801354646682739, "learning_rate": 9.548599290733393e-06, "loss": 0.923, "step": 167},
    {"epoch": 0.672, "grad_norm": 0.6620292663574219, "learning_rate": 9.538887392664544e-06, "loss": 0.9191, "step": 168},
    {"epoch": 0.676, "grad_norm": 0.6333494186401367, "learning_rate": 9.529077174832466e-06, "loss": 0.8987, "step": 169},
    {"epoch": 0.68, "grad_norm": 0.7241418957710266, "learning_rate": 9.519168849742603e-06, "loss": 0.8746, "step": 170},
    {"epoch": 0.684, "grad_norm": 0.6281126737594604, "learning_rate": 9.50916263202557e-06, "loss": 0.8844, "step": 171},
    {"epoch": 0.688, "grad_norm": 0.7443065047264099, "learning_rate": 9.499058738432492e-06, "loss": 0.8763, "step": 172},
    {"epoch": 0.692, "grad_norm": 0.6006516814231873, "learning_rate": 9.488857387830315e-06, "loss": 0.8783, "step": 173},
    {"epoch": 0.696, "grad_norm": 0.6572588682174683, "learning_rate": 9.478558801197065e-06, "loss": 0.8808, "step": 174},
    {"epoch": 0.7, "grad_norm": 0.628270149230957, "learning_rate": 9.468163201617063e-06, "loss": 0.8312, "step": 175},
    {"epoch": 0.704, "grad_norm": 0.6484922170639038, "learning_rate": 9.457670814276083e-06, "loss": 0.8979, "step": 176},
    {"epoch": 0.708, "grad_norm": 0.6248320937156677, "learning_rate": 9.44708186645649e-06, "loss": 0.8708, "step": 177},
    {"epoch": 0.712, "grad_norm": 0.8069347739219666, "learning_rate": 9.436396587532297e-06, "loss": 0.9053, "step": 178},
    {"epoch": 0.716, "grad_norm": 0.5655907392501831, "learning_rate": 9.425615208964217e-06, "loss": 0.8701, "step": 179},
    {"epoch": 0.72, "grad_norm": 0.9169421195983887, "learning_rate": 9.414737964294636e-06, "loss": 0.9063, "step": 180},
    {"epoch": 0.724, "grad_norm": 0.6653777956962585, "learning_rate": 9.403765089142554e-06, "loss": 0.8534, "step": 181},
    {"epoch": 0.728, "grad_norm": 0.7143352627754211, "learning_rate": 9.392696821198488e-06, "loss": 0.868, "step": 182},
    {"epoch": 0.732, "grad_norm": 0.7472746968269348, "learning_rate": 9.381533400219319e-06, "loss": 0.8942, "step": 183},
    {"epoch": 0.736, "grad_norm": 0.6738988757133484, "learning_rate": 9.370275068023097e-06, "loss": 0.9089, "step": 184},
    {"epoch": 0.74, "grad_norm": 0.7050944566726685, "learning_rate": 9.358922068483813e-06, "loss": 0.8677, "step": 185},
    {"epoch": 0.744, "grad_norm": 0.7963171601295471, "learning_rate": 9.347474647526095e-06, "loss": 0.8855, "step": 186},
    {"epoch": 0.748, "grad_norm": 0.8421769738197327, "learning_rate": 9.335933053119906e-06, "loss": 0.8683, "step": 187},
    {"epoch": 0.752, "grad_norm": 0.6927816867828369, "learning_rate": 9.324297535275156e-06, "loss": 0.8607, "step": 188},
    {"epoch": 0.756, "grad_norm": 0.7820339202880859, "learning_rate": 9.312568346036288e-06, "loss": 0.8911, "step": 189},
    {"epoch": 0.76, "grad_norm": 0.6646762490272522, "learning_rate": 9.30074573947683e-06, "loss": 0.8981, "step": 190},
    {"epoch": 0.764, "grad_norm": 0.6774222254753113, "learning_rate": 9.288829971693869e-06, "loss": 0.8827, "step": 191},
    {"epoch": 0.768, "grad_norm": 0.5905522108078003, "learning_rate": 9.276821300802535e-06, "loss": 0.8589, "step": 192},
    {"epoch": 0.772, "grad_norm": 0.7953365445137024, "learning_rate": 9.264719986930376e-06, "loss": 0.8921, "step": 193},
    {"epoch": 0.776, "grad_norm": 0.6584376692771912, "learning_rate": 9.25252629221175e-06, "loss": 0.8886, "step": 194},
    {"epoch": 0.78, "grad_norm": 0.6534374952316284, "learning_rate": 9.24024048078213e-06, "loss": 0.891, "step": 195},
    {"epoch": 0.784, "grad_norm": 0.6690033078193665, "learning_rate": 9.227862818772392e-06, "loss": 0.8743, "step": 196},
    {"epoch": 0.788, "grad_norm": 0.6820501089096069, "learning_rate": 9.215393574303043e-06, "loss": 0.8985, "step": 197},
    {"epoch": 0.792, "grad_norm": 0.7419943809509277, "learning_rate": 9.202833017478421e-06, "loss": 0.8848, "step": 198},
    {"epoch": 0.796, "grad_norm": 0.6446091532707214, "learning_rate": 9.190181420380838e-06, "loss": 0.8926, "step": 199},
    {"epoch": 0.8, "grad_norm": 0.715246856212616, "learning_rate": 9.177439057064684e-06, "loss": 0.8825, "step": 200},
    {"epoch": 0.804, "grad_norm": 0.7427367568016052, "learning_rate": 9.164606203550498e-06, "loss": 0.8927, "step": 201},
    {"epoch": 0.808, "grad_norm": 0.4903748333454132, "learning_rate": 9.151683137818989e-06, "loss": 0.8901, "step": 202},
    {"epoch": 0.812, "grad_norm": 0.580417811870575, "learning_rate": 9.138670139805004e-06, "loss": 0.8637, "step": 203},
    {"epoch": 0.816, "grad_norm": 0.610196590423584, "learning_rate": 9.125567491391476e-06, "loss": 0.8796, "step": 204},
    {"epoch": 0.82, "grad_norm": 0.6373223066329956, "learning_rate": 9.112375476403313e-06, "loss": 0.8838, "step": 205},
    {"epoch": 0.824, "grad_norm": 0.5853657722473145, "learning_rate": 9.099094380601244e-06, "loss": 0.8806, "step": 206},
    {"epoch": 0.828, "grad_norm": 0.49319297075271606, "learning_rate": 9.085724491675642e-06, "loss": 0.8677, "step": 207},
    {"epoch": 0.832, "grad_norm": 0.6091550588607788, "learning_rate": 9.072266099240286e-06, "loss": 0.8656, "step": 208},
    {"epoch": 0.836, "grad_norm": 0.5975654125213623, "learning_rate": 9.058719494826076e-06, "loss": 0.9051, "step": 209},
    {"epoch": 0.84, "grad_norm": 0.7250429391860962, "learning_rate": 9.045084971874738e-06, "loss": 0.8553, "step": 210},
    {"epoch": 0.844, "grad_norm": 0.6404184103012085, "learning_rate": 9.031362825732456e-06, "loss": 0.838, "step": 211},
    {"epoch": 0.848, "grad_norm": 0.636917233467102, "learning_rate": 9.017553353643479e-06, "loss": 0.8485, "step": 212},
    {"epoch": 0.852, "grad_norm": 0.7165372371673584, "learning_rate": 9.003656854743667e-06, "loss": 0.8685, "step": 213},
    {"epoch": 0.856, "grad_norm": 0.6287906765937805, "learning_rate": 8.989673630054044e-06, "loss": 0.8838, "step": 214},
    {"epoch": 0.86, "grad_norm": 0.6897153258323669, "learning_rate": 8.97560398247424e-06, "loss": 0.8941, "step": 215},
    {"epoch": 0.864, "grad_norm": 0.7562617659568787, "learning_rate": 8.961448216775955e-06, "loss": 0.8749, "step": 216},
    {"epoch": 0.868, "grad_norm": 0.5686541795730591, "learning_rate": 8.947206639596346e-06, "loss": 0.8698, "step": 217},
    {"epoch": 0.872, "grad_norm": 0.7385779619216919, "learning_rate": 8.932879559431392e-06, "loss": 0.8785, "step": 218},
    {"epoch": 0.876, "grad_norm": 0.6453441977500916, "learning_rate": 8.9184672866292e-06, "loss": 0.8544, "step": 219},
    {"epoch": 0.88, "grad_norm": 0.8072611093521118, "learning_rate": 8.903970133383297e-06, "loss": 0.8718, "step": 220},
    {"epoch": 0.884, "grad_norm": 0.6873461008071899, "learning_rate": 8.889388413725857e-06, "loss": 0.8845, "step": 221},
    {"epoch": 0.888, "grad_norm": 0.7314361929893494, "learning_rate": 8.874722443520898e-06, "loss": 0.8734, "step": 222},
    {"epoch": 0.892, "grad_norm": 0.582042932510376, "learning_rate": 8.859972540457451e-06, "loss": 0.8626, "step": 223},
    {"epoch": 0.896, "grad_norm": 0.5957586169242859, "learning_rate": 8.845139024042664e-06, "loss": 0.8909, "step": 224},
    {"epoch": 0.9, "grad_norm": 0.684726357460022, "learning_rate": 8.83022221559489e-06, "loss": 0.8861, "step": 225},
    {"epoch": 0.904, "grad_norm": 0.6183290481567383, "learning_rate": 8.815222438236726e-06, "loss": 0.8782, "step": 226},
    {"epoch": 0.908, "grad_norm": 0.6130846738815308, "learning_rate": 8.800140016888009e-06, "loss": 0.9471, "step": 227},
    {"epoch": 0.912, "grad_norm": 0.6433553099632263, "learning_rate": 8.784975278258783e-06, "loss": 0.8722, "step": 228},
    {"epoch": 0.916, "grad_norm": 0.5252291560173035, "learning_rate": 8.769728550842217e-06, "loss": 0.8919, "step": 229},
    {"epoch": 0.92, "grad_norm": 0.6188841462135315, "learning_rate": 8.754400164907496e-06, "loss": 0.8262, "step": 230},
    {"epoch": 0.924, "grad_norm": 0.6740955114364624, "learning_rate": 8.73899045249266e-06, "loss": 0.8602, "step": 231},
    {"epoch": 0.928, "grad_norm": 0.5842002630233765, "learning_rate": 8.723499747397415e-06, "loss": 0.8629, "step": 232},
    {"epoch": 0.932, "grad_norm": 0.6343997120857239, "learning_rate": 8.707928385175898e-06, "loss": 0.8843, "step": 233},
    {"epoch": 0.936, "grad_norm": 0.5856566429138184, "learning_rate": 8.692276703129421e-06, "loss": 0.8469, "step": 234},
    {"epoch": 0.94, "grad_norm": 0.5288295745849609, "learning_rate": 8.676545040299145e-06, "loss": 0.8523, "step": 235},
    {"epoch": 0.944, "grad_norm": 0.5988165736198425, "learning_rate": 8.660733737458751e-06, "loss": 0.8734, "step": 236},
    {"epoch": 0.948, "grad_norm": 0.5942241549491882, "learning_rate": 8.644843137107058e-06, "loss": 0.872, "step": 237},
    {"epoch": 0.952, "grad_norm": 0.6277519464492798, "learning_rate": 8.628873583460593e-06, "loss": 0.8441, "step": 238},
    {"epoch": 0.956, "grad_norm": 0.6779516339302063, "learning_rate": 8.61282542244614e-06, "loss": 0.8928, "step": 239},
    {"epoch": 0.96, "grad_norm": 0.6148802638053894, "learning_rate": 8.596699001693257e-06, "loss": 0.8329, "step": 240},
    {"epoch": 0.964, "grad_norm": 0.6112699508666992, "learning_rate": 8.580494670526725e-06, "loss": 0.892, "step": 241},
    {"epoch": 0.968, "grad_norm": 0.6572790741920471, "learning_rate": 8.564212779959003e-06, "loss": 0.9021, "step": 242},
    {"epoch": 0.972, "grad_norm": 0.6486849784851074, "learning_rate": 8.547853682682605e-06, "loss": 0.8855, "step": 243},
    {"epoch": 0.976, "grad_norm": 0.5732429027557373, "learning_rate": 8.531417733062476e-06, "loss": 0.8609, "step": 244},
    {"epoch": 0.98, "grad_norm": 0.6210253834724426, "learning_rate": 8.51490528712831e-06, "loss": 0.8396, "step": 245},
    {"epoch": 0.984, "grad_norm": 0.5373111963272095, "learning_rate": 8.498316702566828e-06, "loss": 0.8544, "step": 246},
    {"epoch": 0.988, "grad_norm": 0.5937831401824951, "learning_rate": 8.481652338714048e-06, "loss": 0.8283, "step": 247},
    {"epoch": 0.992, "grad_norm": 0.7062596082687378, "learning_rate": 8.464912556547486e-06, "loss": 0.8407, "step": 248},
    {"epoch": 0.996, "grad_norm": 0.7154653668403625, "learning_rate": 8.44809771867835e-06, "loss": 0.8549, "step": 249},
    {"epoch": 1.0, "grad_norm": 0.734148383140564, "learning_rate": 8.43120818934367e-06, "loss": 0.87, "step": 250},
    {"epoch": 1.004, "grad_norm": 0.6793926358222961, "learning_rate": 8.414244334398418e-06, "loss": 0.7984, "step": 251},
    {"epoch": 1.008, "grad_norm": 0.6782513856887817, "learning_rate": 8.397206521307584e-06, "loss": 0.8299, "step": 252},
    {"epoch": 1.012, "grad_norm": 0.7735963463783264, "learning_rate": 8.380095119138209e-06, "loss": 0.8501, "step": 253},
    {"epoch": 1.016, "grad_norm": 0.7407426238059998, "learning_rate": 8.362910498551402e-06, "loss": 0.8452, "step": 254},
    {"epoch": 1.02, "grad_norm": 0.7997364401817322, "learning_rate": 8.345653031794292e-06, "loss": 0.8553, "step": 255},
    {"epoch": 1.024, "grad_norm": 0.7399711608886719, "learning_rate": 8.328323092691985e-06, "loss": 0.7941, "step": 256},
    {"epoch": 1.028, "grad_norm": 0.6705906987190247, "learning_rate": 8.310921056639451e-06, "loss": 0.8211, "step": 257},
    {"epoch": 1.032, "grad_norm": 0.6934743523597717, "learning_rate": 8.293447300593402e-06, "loss": 0.8224, "step": 258},
    {"epoch": 1.036, "grad_norm": 0.611541211605072, "learning_rate": 8.275902203064125e-06, "loss": 0.8435, "step": 259},
    {"epoch": 1.04, "grad_norm": 0.7381828427314758, "learning_rate": 8.258286144107277e-06, "loss": 0.8238, "step": 260},
    {"epoch": 1.044, "grad_norm": 0.6333577036857605, "learning_rate": 8.240599505315656e-06, "loss": 0.8142, "step": 261},
    {"epoch": 1.048, "grad_norm": 0.7947782874107361, "learning_rate": 8.222842669810936e-06, "loss": 0.8364, "step": 262},
    {"epoch": 1.052, "grad_norm": 0.6043962836265564, "learning_rate": 8.205016022235368e-06, "loss": 0.8271, "step": 263},
    {"epoch": 1.056, "grad_norm": 0.6960867047309875, "learning_rate": 8.18711994874345e-06, "loss": 0.8077, "step": 264},
    {"epoch": 1.06, "grad_norm": 0.60722815990448, "learning_rate": 8.16915483699355e-06, "loss": 0.7997, "step": 265},
    {"epoch": 1.064, "grad_norm": 0.6178349852561951, "learning_rate": 8.151121076139534e-06, "loss": 0.8704, "step": 266},
    {"epoch": 1.068, "grad_norm": 0.6908637881278992, "learning_rate": 8.133019056822303e-06, "loss": 0.8778, "step": 267},
    {"epoch": 1.072, "grad_norm": 0.5272776484489441, "learning_rate": 8.11484917116136e-06, "loss": 0.8283, "step": 268},
    {"epoch": 1.076, "grad_norm": 0.5782445073127747, "learning_rate": 8.096611812746302e-06, "loss": 0.8078, "step": 269},
    {"epoch": 1.08, "grad_norm": 0.6523330211639404, "learning_rate": 8.078307376628292e-06, "loss": 0.8318, "step": 270},
    {"epoch": 1.084, "grad_norm": 0.4992999732494354, "learning_rate": 8.059936259311514e-06, "loss": 0.7898, "step": 271},
    {"epoch": 1.088, "grad_norm": 0.6073350310325623, "learning_rate": 8.041498858744572e-06, "loss": 0.8512, "step": 272},
    {"epoch": 1.092, "grad_norm": 0.6151973605155945, "learning_rate": 8.022995574311876e-06, "loss": 0.8189, "step": 273},
    {"epoch": 1.096, "grad_norm": 0.5909215211868286, "learning_rate": 8.004426806824985e-06, "loss": 0.7754, "step": 274},
    {"epoch": 1.1, "grad_norm": 0.6663960814476013, "learning_rate": 7.985792958513932e-06, "loss": 0.7863, "step": 275},
    {"epoch": 1.104, "grad_norm": 0.5585909485816956, "learning_rate": 7.967094433018508e-06, "loss": 0.8159, "step": 276},
    {"epoch": 1.108, "grad_norm": 0.6069695949554443, "learning_rate": 7.948331635379517e-06, "loss": 0.8046, "step": 277},
    {"epoch": 1.112, "grad_norm": 0.6288580894470215, "learning_rate": 7.929504972030003e-06, "loss": 0.8191, "step": 278},
    {"epoch": 1.116, "grad_norm": 0.5330884456634521, "learning_rate": 7.910614850786448e-06, "loss": 0.8353, "step": 279},
    {"epoch": 1.12, "grad_norm": 0.5828813314437866, "learning_rate": 7.891661680839932e-06, "loss": 0.8404, "step": 280},
    {"epoch": 1.124, "grad_norm": 0.6435533165931702, "learning_rate": 7.872645872747281e-06, "loss": 0.8151, "step": 281},
    {"epoch": 1.1280000000000001, "grad_norm": 0.7021773457527161, "learning_rate": 7.85356783842216e-06, "loss": 0.8314, "step": 282},
    {"epoch": 1.1320000000000001, "grad_norm": 0.5960518717765808, "learning_rate": 7.834427991126155e-06, "loss": 0.7978, "step": 283},
    {"epoch": 1.1360000000000001, "grad_norm": 0.6122227311134338, "learning_rate": 7.815226745459831e-06, "loss": 0.8245, "step": 284},
    {"epoch": 1.1400000000000001, "grad_norm": 0.5207979679107666, "learning_rate": 7.795964517353734e-06, "loss": 0.8185, "step": 285},
    {"epoch": 1.144, "grad_norm": 0.5806198716163635, "learning_rate": 7.776641724059398e-06, "loss": 0.8114, "step": 286},
    {"epoch": 1.148, "grad_norm": 0.6067046523094177, "learning_rate": 7.757258784140286e-06, "loss": 0.8145, "step": 287},
    {"epoch": 1.152, "grad_norm": 0.5425034165382385, "learning_rate": 7.737816117462752e-06, "loss": 0.8004, "step": 288},
    {"epoch": 1.156, "grad_norm": 0.5887053608894348, "learning_rate": 7.718314145186918e-06, "loss": 0.8432, "step": 289},
    {"epoch": 1.16, "grad_norm": 0.5984364151954651, "learning_rate": 7.698753289757565e-06, "loss": 0.8205, "step": 290},
    {"epoch": 1.164, "grad_norm": 0.6469537615776062, "learning_rate": 7.679133974894984e-06, "loss": 0.82, "step": 291},
    {"epoch": 1.168, "grad_norm": 0.5600078701972961, "learning_rate": 7.65945662558579e-06, "loss": 0.8316, "step": 292},
    {"epoch": 1.172, "grad_norm": 0.5654756426811218, "learning_rate": 7.639721668073718e-06, "loss": 0.8174, "step": 293},
    {"epoch": 1.176, "grad_norm": 0.7125182151794434, "learning_rate": 7.619929529850397e-06, "loss": 0.813, "step": 294},
    {"epoch": 1.18, "grad_norm": 0.5197665095329285, "learning_rate": 7.600080639646077e-06, "loss": 0.8239, "step": 295},
    {"epoch": 1.184, "grad_norm": 0.6301279664039612, "learning_rate": 7.580175427420358e-06, "loss": 0.8434, "step": 296},
    {"epoch": 1.188, "grad_norm": 0.5137627124786377, "learning_rate": 7.560214324352858e-06, "loss": 0.8076, "step": 297},
    {"epoch": 1.192, "grad_norm": 0.6159291863441467, "learning_rate": 7.54019776283389e-06, "loss": 0.8301, "step": 298},
    {"epoch": 1.196, "grad_norm": 0.68797367811203, "learning_rate": 7.520126176455084e-06, "loss": 0.8249, "step": 299},
    {"epoch": 1.2, "grad_norm": 0.5145540237426758, "learning_rate": 7.500000000000001e-06, "loss": 0.7956, "step": 300},
    {"epoch": 1.204, "grad_norm": 0.5432578921318054, "learning_rate": 7.479819669434712e-06, "loss": 0.8577, "step": 301},
    {"epoch": 1.208, "grad_norm": 0.5872198343276978, "learning_rate": 7.459585621898353e-06, "loss": 0.8661, "step": 302},
    {"epoch": 1.212, "grad_norm": 0.5940935015678406, "learning_rate": 7.4392982956936644e-06, "loss": 0.8543, "step": 303},
    {"epoch": 1.216, "grad_norm": 0.5254611372947693, "learning_rate": 7.418958130277483e-06, "loss": 0.8148, "step": 304},
    {"epoch": 1.22, "grad_norm": 0.5751111507415771, "learning_rate": 7.398565566251232e-06, "loss": 0.8162, "step": 305},
    {"epoch": 1.224, "grad_norm": 0.5236274003982544, "learning_rate": 7.378121045351378e-06, "loss": 0.8242, "step": 306},
    {"epoch": 1.228, "grad_norm": 0.5233936309814453, "learning_rate": 7.357625010439853e-06, "loss": 0.8429, "step": 307},
    {"epoch": 1.232, "grad_norm": 0.6502784490585327, "learning_rate": 7.337077905494472e-06, "loss": 0.8413, "step": 308},
    {"epoch": 1.236, "grad_norm": 0.5654633641242981, "learning_rate": 7.31648017559931e-06, "loss": 0.8431, "step": 309},
    {"epoch": 1.24, "grad_norm": 0.6566516757011414, "learning_rate": 7.295832266935059e-06, "loss": 0.8634, "step": 310},
    {"epoch": 1.244, "grad_norm": 0.6114216446876526, "learning_rate": 7.275134626769369e-06, "loss": 0.8387, "step": 311},
    {"epoch": 1.248, "grad_norm": 0.668125331401825, "learning_rate": 7.254387703447154e-06, "loss": 0.8087, "step": 312},
    {"epoch": 1.252, "grad_norm": 0.6646181344985962, "learning_rate": 7.233591946380884e-06, "loss": 0.8138, "step": 313},
    {"epoch": 1.256, "grad_norm": 0.5681068301200867, "learning_rate": 7.212747806040845e-06, "loss": 0.8258, "step": 314},
    {"epoch": 1.26, "grad_norm": 0.6887611746788025, "learning_rate": 7.191855733945388e-06, "loss": 0.8444, "step": 315},
    {"epoch": 1.264, "grad_norm": 0.707757294178009, "learning_rate": 7.170916182651141e-06, "loss": 0.8155, "step": 316},
    {"epoch": 1.268, "grad_norm": 0.6170247793197632, "learning_rate": 7.149929605743214e-06, "loss": 0.7777, "step": 317},
    {"epoch": 1.272, "grad_norm": 0.9348410367965698, "learning_rate": 7.128896457825364e-06, "loss": 0.7944, "step": 318},
    {"epoch": 1.276, "grad_norm": 0.6176925301551819, "learning_rate": 7.107817194510157e-06, "loss": 0.8503, "step": 319},
    {"epoch": 1.28, "grad_norm": 0.6812437176704407, "learning_rate": 7.08669227240909e-06, "loss": 0.8266, "step": 320},
    {"epoch": 1.284, "grad_norm": 0.7825397253036499, "learning_rate": 7.06552214912271e-06, "loss": 0.8107, "step": 321},
    {"epoch": 1.288, "grad_norm": 0.583035409450531, "learning_rate": 7.04430728323069e-06, "loss": 0.7889, "step": 322},
    {"epoch": 1.292, "grad_norm": 0.8215271830558777, "learning_rate": 7.023048134281907e-06, "loss": 0.814, "step": 323},
    {"epoch": 1.296, "grad_norm": 0.6114867329597473, "learning_rate": 7.0017451627844765e-06, "loss": 0.8144, "step": 324},
    {"epoch": 1.3, "grad_norm": 0.6182892322540283, "learning_rate": 6.980398830195785e-06, "loss": 0.816, "step": 325},
    {"epoch": 1.304, "grad_norm": 0.7972235679626465, "learning_rate": 6.959009598912493e-06, "loss": 0.8439, "step": 326},
    {"epoch": 1.308, "grad_norm": 0.5884739756584167, "learning_rate": 6.9375779322605154e-06, "loss": 0.8244, "step": 327},
    {"epoch": 1.312, "grad_norm": 0.81697678565979, "learning_rate": 6.916104294484988e-06, "loss": 0.851, "step": 328},
    {"epoch": 1.316, "grad_norm": 0.6550260782241821, "learning_rate": 6.8945891507402075e-06, "loss": 0.7992, "step": 329},
    {"epoch": 1.32, "grad_norm": 0.7166432738304138, "learning_rate": 6.873032967079562e-06, "loss": 0.8443, "step": 330},
    {"epoch": 1.324, "grad_norm": 0.6055835485458374, "learning_rate": 6.851436210445427e-06, "loss": 0.8277, "step": 331},
    {"epoch": 1.328, "grad_norm": 0.5842556953430176, "learning_rate": 6.829799348659061e-06, "loss": 0.8135, "step": 332},
    {"epoch": 1.332, "grad_norm": 0.7298783659934998, "learning_rate": 6.808122850410461e-06, "loss": 0.8288, "step": 333},
    {"epoch": 1.336, "grad_norm": 0.5614907145500183, "learning_rate": 6.7864071852482205e-06, "loss": 0.8283, "step": 334},
    {"epoch": 1.34, "grad_norm": 0.7044088840484619, "learning_rate": 6.7646528235693445e-06, "loss": 0.8464, "step": 335},
    {"epoch": 1.3439999999999999, "grad_norm": 0.7158640027046204, "learning_rate": 6.7428602366090764e-06, "loss": 0.8347, "step": 336},
    {"epoch": 1.3479999999999999, "grad_norm": 0.5172532200813293, "learning_rate": 6.721029896430678e-06, "loss": 0.8384, "step": 337},
    {"epoch": 1.3519999999999999, "grad_norm": 0.6013163924217224, "learning_rate": 6.699162275915208e-06, "loss": 0.8151, "step": 338},
    {"epoch": 1.3559999999999999, "grad_norm": 0.5935965180397034, "learning_rate": 6.677257848751276e-06, "loss": 0.832, "step": 339},
    {"epoch": 1.3599999999999999, "grad_norm": 0.5422365069389343, "learning_rate": 6.655317089424791e-06, "loss": 0.7989, "step": 340},
    {"epoch": 1.3639999999999999, "grad_norm": 0.5724878907203674, "learning_rate": 6.633340473208673e-06, "loss": 0.8106, "step": 341},
    {"epoch": 1.3679999999999999, "grad_norm": 0.5668215155601501, "learning_rate": 6.611328476152557e-06, "loss": 0.8197, "step": 342},
    {"epoch": 1.3719999999999999, "grad_norm": 0.6239653825759888, "learning_rate": 6.58928157507249e-06, "loss": 0.836, "step": 343},
    {"epoch": 1.376, "grad_norm": 0.561103105545044, "learning_rate": 6.567200247540599e-06, "loss": 0.8207, "step": 344},
    {"epoch": 1.38, "grad_norm": 0.5621249079704285, "learning_rate": 6.545084971874738e-06, "loss": 0.8452, "step": 345},
    {"epoch": 1.384, "grad_norm": 0.5491905808448792, "learning_rate": 6.522936227128139e-06, "loss": 0.7999, "step": 346},
    {"epoch": 1.388, "grad_norm": 0.5302982330322266, "learning_rate": 6.500754493079029e-06, "loss": 0.8387, "step": 347},
    {"epoch": 1.392, "grad_norm": 0.49233949184417725, "learning_rate": 6.4785402502202345e-06, "loss": 0.8127, "step": 348},
    {"epoch": 1.396, "grad_norm": 0.5672960877418518, "learning_rate": 6.456293979748778e-06, "loss": 0.8312, "step": 349},
    {"epoch": 1.4, "grad_norm": 0.5462888479232788, "learning_rate": 6.434016163555452e-06, "loss": 0.818, "step": 350},
    {"epoch": 1.404, "grad_norm": 0.5679849982261658, "learning_rate": 6.411707284214384e-06, "loss": 0.8109, "step": 351},
    {"epoch": 1.408, "grad_norm": 0.6677523255348206, "learning_rate": 6.389367824972575e-06, "loss": 0.8329, "step": 352},
    {"epoch": 1.412, "grad_norm": 0.5302258729934692, "learning_rate": 6.366998269739442e-06, "loss": 0.8209, "step": 353},
    {"epoch": 1.416, "grad_norm": 0.6249000430107117, "learning_rate": 6.344599103076329e-06, "loss": 0.8423, "step": 354},
    {"epoch": 1.42, "grad_norm": 0.6532145142555237, "learning_rate": 6.322170810186013e-06, "loss": 0.8315, "step": 355},
    {"epoch": 1.424, "grad_norm": 0.5602342486381531, "learning_rate": 6.299713876902188e-06, "loss": 0.8397, "step": 356},
    {"epoch": 1.428, "grad_norm": 0.7034306526184082, "learning_rate": 6.277228789678953e-06, "loss": 0.8165, "step": 357},
    {"epoch": 1.432, "grad_norm": 0.6753026843070984, "learning_rate": 6.254716035580264e-06, "loss": 0.8237, "step": 358},
    {"epoch": 1.436, "grad_norm": 0.6752943396568298, "learning_rate": 6.23217610226939e-06, "loss": 0.8345, "step": 359},
    {"epoch": 1.44, "grad_norm": 0.6669530272483826, "learning_rate": 6.209609477998339e-06, "loss": 0.7928, "step": 360},
    {"epoch": 1.444, "grad_norm": 0.667064368724823, "learning_rate": 6.187016651597299e-06, "loss": 0.8073, "step": 361},
    {"epoch": 1.448, "grad_norm": 0.7478964924812317, "learning_rate": 6.16439811246403e-06, "loss": 0.8672, "step": 362},
    {"epoch": 1.452, "grad_norm": 0.5698713064193726, "learning_rate": 6.141754350553279e-06, "loss": 0.8226, "step": 363},
    {"epoch": 1.456, "grad_norm": 0.6351299285888672, "learning_rate": 6.119085856366158e-06, "loss": 0.7964, "step": 364},
    {"epoch": 1.46, "grad_norm": 0.697109043598175, "learning_rate": 6.0963931209395165e-06, "loss": 0.7774, "step": 365},
    {"epoch": 1.464, "grad_norm": 0.6185441017150879, "learning_rate": 6.073676635835317e-06, "loss": 0.8087, "step": 366},
    {"epoch": 1.468, "grad_norm": 0.552899181842804, "learning_rate": 6.05093689312997e-06, "loss": 0.8159, "step": 367},
    {"epoch": 1.472, "grad_norm": 0.6541306376457214, "learning_rate": 6.028174385403693e-06, "loss": 0.8259, "step": 368},
    {"epoch": 1.476, "grad_norm": 0.5495116114616394, "learning_rate": 6.005389605729824e-06, "loss": 0.8156, "step": 369},
    {"epoch": 1.48, "grad_norm": 0.6034694910049438, "learning_rate": 5.982583047664151e-06, "loss": 0.8131, "step": 370},
    {"epoch": 1.484, "grad_norm": 0.6365712881088257, "learning_rate": 5.9597552052342174e-06, "loss": 0.8305, "step": 371},
    {"epoch": 1.488, "grad_norm": 0.5371845960617065, "learning_rate": 5.936906572928625e-06, "loss": 0.8154, "step": 372},
    {"epoch": 1.492, "grad_norm": 0.6355674266815186, "learning_rate": 5.914037645686308e-06, "loss": 0.8203, "step": 373},
    {"epoch": 1.496, "grad_norm": 0.5485637187957764, "learning_rate": 5.891148918885834e-06, "loss": 0.8417, "step": 374},
    {"epoch": 1.5, "grad_norm": 0.5740164518356323, "learning_rate": 5.8682408883346535e-06, "loss": 0.8497, "step": 375},
    {"epoch": 1.504, "grad_norm": 0.5761863589286804, "learning_rate": 5.84531405025837e-06, "loss": 0.7931, "step": 376},
    {"epoch": 1.508, "grad_norm": 0.6561781167984009, "learning_rate": 5.8223689012899945e-06, "loss": 0.7777, "step": 377},
    {"epoch": 1.512, "grad_norm": 0.5368021726608276, "learning_rate": 5.799405938459175e-06, "loss": 0.8256, "step": 378},
    {"epoch": 1.516, "grad_norm": 0.5972307324409485, "learning_rate": 5.776425659181438e-06, "loss": 0.8315, "step": 379},
    {"epoch": 1.52, "grad_norm": 0.4860232174396515, "learning_rate": 5.753428561247416e-06, "loss": 0.8276, "step": 380},
    {"epoch": 1.524, "grad_norm": 0.5551620721817017, "learning_rate": 5.730415142812059e-06, "loss": 0.8376, "step": 381},
    {"epoch": 1.528, "grad_norm": 0.6248589158058167, "learning_rate": 5.707385902383845e-06, "loss": 0.8344, "step": 382},
    {"epoch": 1.532, "grad_norm": 0.519512951374054, "learning_rate": 5.684341338813986e-06, "loss": 0.8016, "step": 383},
    {"epoch": 1.536, "grad_norm": 0.5413280725479126, "learning_rate": 5.661281951285613e-06, "loss": 0.8216, "step": 384},
    {"epoch": 1.54, "grad_norm": 0.5940418243408203, "learning_rate": 5.638208239302975e-06, "loss": 0.8302, "step": 385},
    {"epoch": 1.544, "grad_norm": 0.5316104888916016, "learning_rate": 5.615120702680604e-06, "loss": 0.8124, "step": 386},
    {"epoch": 1.548, "grad_norm": 0.5062645673751831, "learning_rate": 5.592019841532507e-06, "loss": 0.7866, "step": 387},
    {"epoch": 1.552, "grad_norm": 0.5303630828857422, "learning_rate": 5.568906156261309e-06, "loss": 0.8453, "step": 388},
    {"epoch": 1.556, "grad_norm": 0.5888771414756775, "learning_rate": 5.54578014754744e-06, "loss": 0.8188, "step": 389},
    {"epoch": 1.56, "grad_norm": 0.5190044641494751, "learning_rate": 5.522642316338268e-06, "loss": 0.8008, "step": 390},
    {"epoch": 1.564, "grad_norm": 0.537983238697052, "learning_rate": 5.499493163837258e-06, "loss": 0.876, "step": 391},
    {"epoch": 1.568, "grad_norm": 0.5991531610488892, "learning_rate": 5.476333191493108e-06, "loss": 0.7961, "step": 392},
    {"epoch": 1.572, "grad_norm": 0.48126405477523804, "learning_rate": 5.453162900988902e-06, "loss": 0.8384, "step": 393},
    {"epoch": 1.576, "grad_norm": 0.5848698616027832, "learning_rate": 5.429982794231221e-06, "loss": 0.8287, "step": 394},
    {"epoch": 1.58, "grad_norm": 0.5644991397857666, "learning_rate": 5.406793373339292e-06, "loss": 0.825, "step": 395},
    {"epoch": 1.584, "grad_norm": 0.49802395701408386, "learning_rate": 5.383595140634093e-06, "loss": 0.8195, "step": 396},
    {"epoch": 1.588, "grad_norm": 0.5119688510894775, "learning_rate": 5.360388598627487e-06, "loss": 0.7859, "step": 397},
    {"epoch": 1.592, "grad_norm": 0.5061742067337036, "learning_rate": 5.337174250011326e-06, "loss": 0.7739, "step": 398},
    {"epoch": 1.596, "grad_norm": 0.5984253287315369, "learning_rate": 5.3139525976465675e-06, "loss": 0.8241, "step": 399},
    {"epoch": 1.6, "grad_norm": 0.5524809956550598, "learning_rate": 5.290724144552379e-06, "loss": 0.8186, "step": 400},
    {"epoch": 1.604, "grad_norm": 0.5304140448570251, "learning_rate": 5.267489393895247e-06, "loss": 0.7989, "step": 401},
    {"epoch": 1.608, "grad_norm": 0.5568251609802246, "learning_rate": 5.244248848978067e-06, "loss": 0.8459, "step": 402},
    {"epoch": 1.612, "grad_norm": 0.47173407673835754, "learning_rate": 5.221003013229253e-06,
|
"loss": 0.8308, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.616, |
|
"grad_norm": 0.49636995792388916, |
|
"learning_rate": 5.197752390191827e-06, |
|
"loss": 0.8204, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5288768410682678, |
|
"learning_rate": 5.174497483512506e-06, |
|
"loss": 0.8598, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.624, |
|
"grad_norm": 0.4819859564304352, |
|
"learning_rate": 5.151238796930804e-06, |
|
"loss": 0.8271, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.6280000000000001, |
|
"grad_norm": 0.5618477463722229, |
|
"learning_rate": 5.127976834268112e-06, |
|
"loss": 0.8019, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.6320000000000001, |
|
"grad_norm": 0.5314104557037354, |
|
"learning_rate": 5.1047120994167855e-06, |
|
"loss": 0.8342, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.6360000000000001, |
|
"grad_norm": 0.47630175948143005, |
|
"learning_rate": 5.081445096329229e-06, |
|
"loss": 0.8294, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.6400000000000001, |
|
"grad_norm": 0.5262562036514282, |
|
"learning_rate": 5.0581763290069865e-06, |
|
"loss": 0.797, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.6440000000000001, |
|
"grad_norm": 0.5042218565940857, |
|
"learning_rate": 5.034906301489808e-06, |
|
"loss": 0.7843, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.6480000000000001, |
|
"grad_norm": 0.6031314730644226, |
|
"learning_rate": 5.011635517844753e-06, |
|
"loss": 0.7805, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.6520000000000001, |
|
"grad_norm": 0.5020240545272827, |
|
"learning_rate": 4.988364482155249e-06, |
|
"loss": 0.8101, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.6560000000000001, |
|
"grad_norm": 0.4847033619880676, |
|
"learning_rate": 4.965093698510192e-06, |
|
"loss": 0.7977, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.6600000000000001, |
|
"grad_norm": 0.5217180252075195, |
|
"learning_rate": 4.941823670993016e-06, |
|
"loss": 0.8385, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.4937134385108948, |
|
"learning_rate": 4.9185549036707715e-06, |
|
"loss": 0.8213, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.6680000000000001, |
|
"grad_norm": 0.5219268202781677, |
|
"learning_rate": 4.895287900583216e-06, |
|
"loss": 0.8416, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.6720000000000002, |
|
"grad_norm": 0.6660354137420654, |
|
"learning_rate": 4.87202316573189e-06, |
|
"loss": 0.809, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.6760000000000002, |
|
"grad_norm": 0.5231005549430847, |
|
"learning_rate": 4.8487612030691975e-06, |
|
"loss": 0.8451, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.6800000000000002, |
|
"grad_norm": 0.49823904037475586, |
|
"learning_rate": 4.825502516487497e-06, |
|
"loss": 0.8033, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.6840000000000002, |
|
"grad_norm": 0.5597706437110901, |
|
"learning_rate": 4.802247609808175e-06, |
|
"loss": 0.7785, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.688, |
|
"grad_norm": 0.5253022313117981, |
|
"learning_rate": 4.778996986770747e-06, |
|
"loss": 0.8273, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.692, |
|
"grad_norm": 0.4566793739795685, |
|
"learning_rate": 4.755751151021934e-06, |
|
"loss": 0.8136, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.696, |
|
"grad_norm": 0.5758362412452698, |
|
"learning_rate": 4.732510606104754e-06, |
|
"loss": 0.7944, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.5128145813941956, |
|
"learning_rate": 4.7092758554476215e-06, |
|
"loss": 0.7787, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.704, |
|
"grad_norm": 0.4326518177986145, |
|
"learning_rate": 4.686047402353433e-06, |
|
"loss": 0.8449, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.708, |
|
"grad_norm": 0.5038052201271057, |
|
"learning_rate": 4.662825749988675e-06, |
|
"loss": 0.8656, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.712, |
|
"grad_norm": 0.48481109738349915, |
|
"learning_rate": 4.639611401372514e-06, |
|
"loss": 0.8263, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.716, |
|
"grad_norm": 0.45356905460357666, |
|
"learning_rate": 4.6164048593659076e-06, |
|
"loss": 0.7882, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5066087245941162, |
|
"learning_rate": 4.59320662666071e-06, |
|
"loss": 0.8271, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.724, |
|
"grad_norm": 0.5313457250595093, |
|
"learning_rate": 4.570017205768779e-06, |
|
"loss": 0.8182, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.5594462752342224, |
|
"learning_rate": 4.546837099011101e-06, |
|
"loss": 0.7873, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.732, |
|
"grad_norm": 0.4426927864551544, |
|
"learning_rate": 4.523666808506893e-06, |
|
"loss": 0.8012, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.736, |
|
"grad_norm": 0.5021507740020752, |
|
"learning_rate": 4.500506836162746e-06, |
|
"loss": 0.8149, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.46885576844215393, |
|
"learning_rate": 4.477357683661734e-06, |
|
"loss": 0.8252, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.744, |
|
"grad_norm": 0.4938061833381653, |
|
"learning_rate": 4.45421985245256e-06, |
|
"loss": 0.7868, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.748, |
|
"grad_norm": 0.4414080083370209, |
|
"learning_rate": 4.431093843738693e-06, |
|
"loss": 0.8138, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.752, |
|
"grad_norm": 0.4636024236679077, |
|
"learning_rate": 4.4079801584674955e-06, |
|
"loss": 0.8376, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.756, |
|
"grad_norm": 0.46044057607650757, |
|
"learning_rate": 4.384879297319398e-06, |
|
"loss": 0.8347, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.45228543877601624, |
|
"learning_rate": 4.361791760697027e-06, |
|
"loss": 0.8501, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.764, |
|
"grad_norm": 0.468711793422699, |
|
"learning_rate": 4.3387180487143875e-06, |
|
"loss": 0.8194, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.768, |
|
"grad_norm": 0.515216052532196, |
|
"learning_rate": 4.315658661186016e-06, |
|
"loss": 0.8352, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.772, |
|
"grad_norm": 0.5516295433044434, |
|
"learning_rate": 4.2926140976161555e-06, |
|
"loss": 0.8033, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.776, |
|
"grad_norm": 0.4367079734802246, |
|
"learning_rate": 4.269584857187942e-06, |
|
"loss": 0.7962, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.502176821231842, |
|
"learning_rate": 4.246571438752585e-06, |
|
"loss": 0.8152, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.784, |
|
"grad_norm": 0.5594773888587952, |
|
"learning_rate": 4.2235743408185635e-06, |
|
"loss": 0.8351, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.788, |
|
"grad_norm": 0.5325698852539062, |
|
"learning_rate": 4.200594061540827e-06, |
|
"loss": 0.8292, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.48320385813713074, |
|
"learning_rate": 4.1776310987100054e-06, |
|
"loss": 0.8349, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.796, |
|
"grad_norm": 0.6167989373207092, |
|
"learning_rate": 4.154685949741631e-06, |
|
"loss": 0.841, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5394158363342285, |
|
"learning_rate": 4.131759111665349e-06, |
|
"loss": 0.8478, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.804, |
|
"grad_norm": 0.5120943188667297, |
|
"learning_rate": 4.108851081114169e-06, |
|
"loss": 0.842, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.808, |
|
"grad_norm": 0.5258692502975464, |
|
"learning_rate": 4.0859623543136935e-06, |
|
"loss": 0.8085, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.812, |
|
"grad_norm": 0.5280582308769226, |
|
"learning_rate": 4.063093427071376e-06, |
|
"loss": 0.8226, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.8159999999999998, |
|
"grad_norm": 0.5288624167442322, |
|
"learning_rate": 4.040244794765783e-06, |
|
"loss": 0.7907, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.8199999999999998, |
|
"grad_norm": 0.5390572547912598, |
|
"learning_rate": 4.017416952335849e-06, |
|
"loss": 0.8148, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.8239999999999998, |
|
"grad_norm": 0.5915282964706421, |
|
"learning_rate": 3.994610394270178e-06, |
|
"loss": 0.8446, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.8279999999999998, |
|
"grad_norm": 0.5086544752120972, |
|
"learning_rate": 3.971825614596308e-06, |
|
"loss": 0.8151, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.8319999999999999, |
|
"grad_norm": 0.5306235551834106, |
|
"learning_rate": 3.949063106870031e-06, |
|
"loss": 0.8365, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.8359999999999999, |
|
"grad_norm": 0.4914201498031616, |
|
"learning_rate": 3.926323364164684e-06, |
|
"loss": 0.8226, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.8399999999999999, |
|
"grad_norm": 0.5558998584747314, |
|
"learning_rate": 3.903606879060483e-06, |
|
"loss": 0.7785, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.8439999999999999, |
|
"grad_norm": 0.5253325700759888, |
|
"learning_rate": 3.880914143633844e-06, |
|
"loss": 0.7751, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.8479999999999999, |
|
"grad_norm": 0.539868175983429, |
|
"learning_rate": 3.8582456494467214e-06, |
|
"loss": 0.8252, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.8519999999999999, |
|
"grad_norm": 0.5210921168327332, |
|
"learning_rate": 3.835601887535971e-06, |
|
"loss": 0.782, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.43055686354637146, |
|
"learning_rate": 3.812983348402703e-06, |
|
"loss": 0.7884, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.8599999999999999, |
|
"grad_norm": 0.4651299715042114, |
|
"learning_rate": 3.790390522001662e-06, |
|
"loss": 0.8269, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.8639999999999999, |
|
"grad_norm": 0.516089141368866, |
|
"learning_rate": 3.767823897730612e-06, |
|
"loss": 0.8175, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.8679999999999999, |
|
"grad_norm": 0.4905009865760803, |
|
"learning_rate": 3.745283964419736e-06, |
|
"loss": 0.8265, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.8719999999999999, |
|
"grad_norm": 0.4264768958091736, |
|
"learning_rate": 3.7227712103210485e-06, |
|
"loss": 0.8422, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.876, |
|
"grad_norm": 0.46796920895576477, |
|
"learning_rate": 3.700286123097814e-06, |
|
"loss": 0.8218, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.49499964714050293, |
|
"learning_rate": 3.6778291898139907e-06, |
|
"loss": 0.7906, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.884, |
|
"grad_norm": 0.4863964021205902, |
|
"learning_rate": 3.655400896923672e-06, |
|
"loss": 0.7864, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.888, |
|
"grad_norm": 0.49399423599243164, |
|
"learning_rate": 3.633001730260558e-06, |
|
"loss": 0.8228, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.892, |
|
"grad_norm": 0.4784768521785736, |
|
"learning_rate": 3.6106321750274275e-06, |
|
"loss": 0.816, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.896, |
|
"grad_norm": 0.4161972403526306, |
|
"learning_rate": 3.5882927157856175e-06, |
|
"loss": 0.8663, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.4483587145805359, |
|
"learning_rate": 3.5659838364445505e-06, |
|
"loss": 0.8209, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.904, |
|
"grad_norm": 0.47628068923950195, |
|
"learning_rate": 3.543706020251223e-06, |
|
"loss": 0.8279, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.908, |
|
"grad_norm": 0.4492845833301544, |
|
"learning_rate": 3.521459749779769e-06, |
|
"loss": 0.8156, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.912, |
|
"grad_norm": 0.463316410779953, |
|
"learning_rate": 3.4992455069209717e-06, |
|
"loss": 0.8152, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.916, |
|
"grad_norm": 0.4651520252227783, |
|
"learning_rate": 3.4770637728718608e-06, |
|
"loss": 0.832, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.4673428535461426, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 0.8177, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.924, |
|
"grad_norm": 0.49237164855003357, |
|
"learning_rate": 3.4327997524594026e-06, |
|
"loss": 0.8401, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.928, |
|
"grad_norm": 0.47739100456237793, |
|
"learning_rate": 3.4107184249275114e-06, |
|
"loss": 0.8577, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.932, |
|
"grad_norm": 0.48048797249794006, |
|
"learning_rate": 3.3886715238474454e-06, |
|
"loss": 0.8171, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.936, |
|
"grad_norm": 0.45767322182655334, |
|
"learning_rate": 3.3666595267913293e-06, |
|
"loss": 0.8573, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.4932113289833069, |
|
"learning_rate": 3.3446829105752103e-06, |
|
"loss": 0.8174, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.944, |
|
"grad_norm": 0.4785451292991638, |
|
"learning_rate": 3.322742151248726e-06, |
|
"loss": 0.8119, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.948, |
|
"grad_norm": 0.5046617388725281, |
|
"learning_rate": 3.3008377240847955e-06, |
|
"loss": 0.8051, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.952, |
|
"grad_norm": 0.5119112730026245, |
|
"learning_rate": 3.2789701035693242e-06, |
|
"loss": 0.8547, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.956, |
|
"grad_norm": 0.4656376838684082, |
|
"learning_rate": 3.2571397633909252e-06, |
|
"loss": 0.8522, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.5343756675720215, |
|
"learning_rate": 3.2353471764306567e-06, |
|
"loss": 0.8417, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.964, |
|
"grad_norm": 0.47745269536972046, |
|
"learning_rate": 3.2135928147517803e-06, |
|
"loss": 0.7664, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.968, |
|
"grad_norm": 0.4895247220993042, |
|
"learning_rate": 3.1918771495895395e-06, |
|
"loss": 0.8248, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.972, |
|
"grad_norm": 0.4918672740459442, |
|
"learning_rate": 3.1702006513409393e-06, |
|
"loss": 0.8189, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.976, |
|
"grad_norm": 0.46551230549812317, |
|
"learning_rate": 3.148563789554575e-06, |
|
"loss": 0.8193, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5140730738639832, |
|
"learning_rate": 3.12696703292044e-06, |
|
"loss": 0.8297, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.4619219899177551, |
|
"learning_rate": 3.105410849259796e-06, |
|
"loss": 0.7984, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.988, |
|
"grad_norm": 0.49016299843788147, |
|
"learning_rate": 3.0838957055150136e-06, |
|
"loss": 0.8072, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.992, |
|
"grad_norm": 0.4515412747859955, |
|
"learning_rate": 3.0624220677394854e-06, |
|
"loss": 0.7919, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.996, |
|
"grad_norm": 0.4848708510398865, |
|
"learning_rate": 3.040990401087508e-06, |
|
"loss": 0.8406, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.4492388665676117, |
|
"learning_rate": 3.019601169804216e-06, |
|
"loss": 0.7982, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.004, |
|
"grad_norm": 0.4607851207256317, |
|
"learning_rate": 2.9982548372155264e-06, |
|
"loss": 0.7587, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.008, |
|
"grad_norm": 0.4731832444667816, |
|
"learning_rate": 2.9769518657180953e-06, |
|
"loss": 0.7893, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.012, |
|
"grad_norm": 0.4613538682460785, |
|
"learning_rate": 2.9556927167693107e-06, |
|
"loss": 0.782, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.016, |
|
"grad_norm": 0.4291563630104065, |
|
"learning_rate": 2.934477850877292e-06, |
|
"loss": 0.7916, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.41530513763427734, |
|
"learning_rate": 2.9133077275909112e-06, |
|
"loss": 0.7631, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.024, |
|
"grad_norm": 0.39784783124923706, |
|
"learning_rate": 2.892182805489846e-06, |
|
"loss": 0.7966, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.028, |
|
"grad_norm": 0.5020673274993896, |
|
"learning_rate": 2.871103542174637e-06, |
|
"loss": 0.81, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.032, |
|
"grad_norm": 0.5143479704856873, |
|
"learning_rate": 2.8500703942567874e-06, |
|
"loss": 0.7654, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.036, |
|
"grad_norm": 0.455581396818161, |
|
"learning_rate": 2.82908381734886e-06, |
|
"loss": 0.7834, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.44526249170303345, |
|
"learning_rate": 2.8081442660546126e-06, |
|
"loss": 0.8032, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.044, |
|
"grad_norm": 0.4740583896636963, |
|
"learning_rate": 2.7872521939591556e-06, |
|
"loss": 0.7803, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.4611470401287079, |
|
"learning_rate": 2.7664080536191178e-06, |
|
"loss": 0.7928, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.052, |
|
"grad_norm": 0.5031675696372986, |
|
"learning_rate": 2.7456122965528475e-06, |
|
"loss": 0.7821, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.056, |
|
"grad_norm": 0.4245532155036926, |
|
"learning_rate": 2.724865373230632e-06, |
|
"loss": 0.7755, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.48005154728889465, |
|
"learning_rate": 2.7041677330649408e-06, |
|
"loss": 0.7647, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.064, |
|
"grad_norm": 0.4330499768257141, |
|
"learning_rate": 2.683519824400693e-06, |
|
"loss": 0.7806, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.068, |
|
"grad_norm": 0.4752224087715149, |
|
"learning_rate": 2.662922094505529e-06, |
|
"loss": 0.7562, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.072, |
|
"grad_norm": 0.4366036057472229, |
|
"learning_rate": 2.6423749895601494e-06, |
|
"loss": 0.7736, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.076, |
|
"grad_norm": 0.4761313796043396, |
|
"learning_rate": 2.6218789546486235e-06, |
|
"loss": 0.7694, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.41412127017974854, |
|
"learning_rate": 2.601434433748771e-06, |
|
"loss": 0.7603, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.084, |
|
"grad_norm": 0.4283531606197357, |
|
"learning_rate": 2.581041869722519e-06, |
|
"loss": 0.8048, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.088, |
|
"grad_norm": 0.48063892126083374, |
|
"learning_rate": 2.560701704306336e-06, |
|
"loss": 0.754, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.092, |
|
"grad_norm": 0.47472384572029114, |
|
"learning_rate": 2.540414378101647e-06, |
|
"loss": 0.7941, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.096, |
|
"grad_norm": 0.43496283888816833, |
|
"learning_rate": 2.52018033056529e-06, |
|
"loss": 0.7982, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.46004152297973633, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 0.7704, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.104, |
|
"grad_norm": 0.4385717809200287, |
|
"learning_rate": 2.4798738235449164e-06, |
|
"loss": 0.7626, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.108, |
|
"grad_norm": 0.45860159397125244, |
|
"learning_rate": 2.4598022371661113e-06, |
|
"loss": 0.7781, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.4268695116043091, |
|
"learning_rate": 2.4397856756471435e-06, |
|
"loss": 0.7615, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.116, |
|
"grad_norm": 0.533136785030365, |
|
"learning_rate": 2.4198245725796427e-06, |
|
"loss": 0.781, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.4675978422164917, |
|
"learning_rate": 2.3999193603539234e-06, |
|
"loss": 0.7935, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.124, |
|
"grad_norm": 0.4748784899711609, |
|
"learning_rate": 2.380070470149605e-06, |
|
"loss": 0.7558, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.128, |
|
"grad_norm": 0.4731289744377136, |
|
"learning_rate": 2.3602783319262847e-06, |
|
"loss": 0.7801, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.132, |
|
"grad_norm": 0.4706327021121979, |
|
"learning_rate": 2.340543374414212e-06, |
|
"loss": 0.7615, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.136, |
|
"grad_norm": 0.5207027196884155, |
|
"learning_rate": 2.320866025105016e-06, |
|
"loss": 0.7831, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.41023868322372437, |
|
"learning_rate": 2.3012467102424373e-06, |
|
"loss": 0.7642, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.144, |
|
"grad_norm": 0.4167621433734894, |
|
"learning_rate": 2.2816858548130837e-06, |
|
"loss": 0.7502, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.148, |
|
"grad_norm": 0.4176595211029053, |
|
"learning_rate": 2.2621838825372496e-06, |
|
"loss": 0.7878, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.152, |
|
"grad_norm": 0.4613254964351654, |
|
"learning_rate": 2.2427412158597133e-06, |
|
"loss": 0.8084, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.156, |
|
"grad_norm": 0.4380951225757599, |
|
"learning_rate": 2.2233582759406065e-06, |
|
"loss": 0.7675, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.43744876980781555, |
|
"learning_rate": 2.204035482646267e-06, |
|
"loss": 0.7903, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.164, |
|
"grad_norm": 0.42547735571861267, |
|
"learning_rate": 2.184773254540169e-06, |
|
"loss": 0.7992, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.168, |
|
"grad_norm": 0.48103204369544983, |
|
"learning_rate": 2.165572008873845e-06, |
|
"loss": 0.7768, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.172, |
|
"grad_norm": 0.5394415855407715, |
|
"learning_rate": 2.146432161577842e-06, |
|
"loss": 0.7797, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.5370150208473206, |
|
"learning_rate": 2.12735412725272e-06, |
|
"loss": 0.7717, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.434938907623291, |
|
"learning_rate": 2.1083383191600676e-06, |
|
"loss": 0.752, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.184, |
|
"grad_norm": 0.4815244674682617, |
|
"learning_rate": 2.0893851492135536e-06, |
|
"loss": 0.7791, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.188, |
|
"grad_norm": 0.4812857210636139, |
|
"learning_rate": 2.0704950279699986e-06, |
|
"loss": 0.7951, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.192, |
|
"grad_norm": 0.49643102288246155, |
|
"learning_rate": 2.0516683646204836e-06, |
|
"loss": 0.7639, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.196, |
|
"grad_norm": 0.4618609845638275, |
|
"learning_rate": 2.0329055669814936e-06, |
|
"loss": 0.7593, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.41718965768814087, |
|
"learning_rate": 2.0142070414860704e-06, |
|
"loss": 0.8132, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.204, |
|
"grad_norm": 0.44279876351356506, |
|
"learning_rate": 1.9955731931750182e-06, |
|
"loss": 0.7773, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.208, |
|
"grad_norm": 0.4497832953929901, |
|
"learning_rate": 1.977004425688126e-06, |
|
"loss": 0.7781, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.212, |
|
"grad_norm": 0.45165765285491943, |
|
"learning_rate": 1.958501141255427e-06, |
|
"loss": 0.8189, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.216, |
|
"grad_norm": 0.482389897108078, |
|
"learning_rate": 1.9400637406884875e-06, |
|
"loss": 0.7639, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.4482229948043823, |
|
"learning_rate": 1.9216926233717087e-06, |
|
"loss": 0.767, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.224, |
|
"grad_norm": 0.4738655388355255, |
|
"learning_rate": 1.9033881872537009e-06, |
|
"loss": 0.7658, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.228, |
|
"grad_norm": 0.43093428015708923, |
|
"learning_rate": 1.88515082883864e-06, |
|
"loss": 0.7824, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.232, |
|
"grad_norm": 0.4479147791862488, |
|
"learning_rate": 1.8669809431776991e-06, |
|
"loss": 0.8217, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.2359999999999998, |
|
"grad_norm": 0.5110467076301575, |
|
"learning_rate": 1.8488789238604676e-06, |
|
"loss": 0.8179, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.4612603783607483, |
|
"learning_rate": 1.8308451630064484e-06, |
|
"loss": 0.8095, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.2439999999999998, |
|
"grad_norm": 0.4464091360569, |
|
"learning_rate": 1.8128800512565514e-06, |
|
"loss": 0.7891, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.248, |
|
"grad_norm": 0.43865087628364563, |
|
"learning_rate": 1.7949839777646327e-06, |
|
"loss": 0.805, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.252, |
|
"grad_norm": 0.4769790768623352, |
|
"learning_rate": 1.7771573301890666e-06, |
|
"loss": 0.7491, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.2560000000000002, |
|
"grad_norm": 0.42606115341186523, |
|
"learning_rate": 1.7594004946843458e-06, |
|
"loss": 0.7802, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.46678170561790466, |
|
"learning_rate": 1.7417138558927244e-06, |
|
"loss": 0.7608, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.2640000000000002, |
|
"grad_norm": 0.4411817789077759, |
|
"learning_rate": 1.7240977969358757e-06, |
|
"loss": 0.7541, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.268, |
|
"grad_norm": 0.463142991065979, |
|
"learning_rate": 1.7065526994065973e-06, |
|
"loss": 0.7967, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.2720000000000002, |
|
"grad_norm": 0.49492907524108887, |
|
"learning_rate": 1.6890789433605508e-06, |
|
"loss": 0.7817, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.276, |
|
"grad_norm": 0.48016270995140076, |
|
"learning_rate": 1.671676907308018e-06, |
|
"loss": 0.7586, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.2800000000000002, |
|
"grad_norm": 0.4515690803527832, |
|
"learning_rate": 1.6543469682057105e-06, |
|
"loss": 0.7612, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.284, |
|
"grad_norm": 0.4577561020851135, |
|
"learning_rate": 1.6370895014486e-06, |
|
"loss": 0.7828, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.288, |
|
"grad_norm": 0.47520819306373596, |
|
"learning_rate": 1.6199048808617896e-06, |
|
"loss": 0.7579, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.292, |
|
"grad_norm": 0.46000567078590393, |
|
"learning_rate": 1.6027934786924187e-06, |
|
"loss": 0.7288, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.296, |
|
"grad_norm": 0.46916693449020386, |
|
"learning_rate": 1.5857556656015837e-06, |
|
"loss": 0.7974, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.42662757635116577, |
|
"learning_rate": 1.5687918106563326e-06, |
|
"loss": 0.8026, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.4421575665473938, |
|
"learning_rate": 1.551902281321651e-06, |
|
"loss": 0.7705, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.308, |
|
"grad_norm": 0.40408822894096375, |
|
"learning_rate": 1.5350874434525142e-06, |
|
"loss": 0.79, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.312, |
|
"grad_norm": 0.46466535329818726, |
|
"learning_rate": 1.5183476612859538e-06, |
|
"loss": 0.7796, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.316, |
|
"grad_norm": 0.5628266930580139, |
|
"learning_rate": 1.5016832974331725e-06, |
|
"loss": 0.7863, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.41899147629737854, |
|
"learning_rate": 1.4850947128716914e-06, |
|
"loss": 0.7793, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.324, |
|
"grad_norm": 0.415682852268219, |
|
"learning_rate": 1.4685822669375239e-06, |
|
"loss": 0.7962, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.328, |
|
"grad_norm": 0.3926596939563751, |
|
"learning_rate": 1.4521463173173966e-06, |
|
"loss": 0.7752, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.332, |
|
"grad_norm": 0.4485792815685272, |
|
"learning_rate": 1.4357872200409988e-06, |
|
"loss": 0.7563, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.336, |
|
"grad_norm": 0.5413661599159241, |
|
"learning_rate": 1.4195053294732757e-06, |
|
"loss": 0.7666, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.4145437479019165, |
|
"learning_rate": 1.4033009983067454e-06, |
|
"loss": 0.8055, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.344, |
|
"grad_norm": 0.38938337564468384, |
|
"learning_rate": 1.3871745775538598e-06, |
|
"loss": 0.7531, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.348, |
|
"grad_norm": 0.4246900975704193, |
|
"learning_rate": 1.371126416539409e-06, |
|
"loss": 0.7978, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.352, |
|
"grad_norm": 0.48616209626197815, |
|
"learning_rate": 1.3551568628929434e-06, |
|
"loss": 0.7487, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.356, |
|
"grad_norm": 0.43307557702064514, |
|
"learning_rate": 1.339266262541249e-06, |
|
"loss": 0.7962, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.3837439715862274, |
|
"learning_rate": 1.3234549597008572e-06, |
|
"loss": 0.7901, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.364, |
|
"grad_norm": 0.4052961468696594, |
|
"learning_rate": 1.3077232968705805e-06, |
|
"loss": 0.7982, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.4140758216381073, |
|
"learning_rate": 1.2920716148241036e-06, |
|
"loss": 0.7654, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.372, |
|
"grad_norm": 0.3948996067047119, |
|
"learning_rate": 1.2765002526025871e-06, |
|
"loss": 0.7895, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.376, |
|
"grad_norm": 0.3942357003688812, |
|
"learning_rate": 1.2610095475073415e-06, |
|
"loss": 0.7466, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.4024730324745178, |
|
"learning_rate": 1.2455998350925042e-06, |
|
"loss": 0.7965, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.384, |
|
"grad_norm": 0.4260917603969574, |
|
"learning_rate": 1.2302714491577834e-06, |
|
"loss": 0.7675, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.388, |
|
"grad_norm": 0.42180994153022766, |
|
"learning_rate": 1.2150247217412186e-06, |
|
"loss": 0.7693, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.392, |
|
"grad_norm": 0.3940885663032532, |
|
"learning_rate": 1.1998599831119912e-06, |
|
"loss": 0.7664, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.396, |
|
"grad_norm": 0.4478139281272888, |
|
"learning_rate": 1.1847775617632746e-06, |
|
"loss": 0.7766, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.464181512594223, |
|
"learning_rate": 1.1697777844051105e-06, |
|
"loss": 0.779, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.404, |
|
"grad_norm": 0.38157689571380615, |
|
"learning_rate": 1.1548609759573375e-06, |
|
"loss": 0.7929, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.408, |
|
"grad_norm": 0.4406698942184448, |
|
"learning_rate": 1.1400274595425499e-06, |
|
"loss": 0.7581, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.412, |
|
"grad_norm": 0.39558476209640503, |
|
"learning_rate": 1.1252775564791023e-06, |
|
"loss": 0.7649, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.416, |
|
"grad_norm": 0.4835960865020752, |
|
"learning_rate": 1.1106115862741457e-06, |
|
"loss": 0.7938, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.4387381076812744, |
|
"learning_rate": 1.096029866616704e-06, |
|
"loss": 0.7637, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.424, |
|
"grad_norm": 0.43130138516426086, |
|
"learning_rate": 1.0815327133708015e-06, |
|
"loss": 0.7722, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.428, |
|
"grad_norm": 0.4822551906108856, |
|
"learning_rate": 1.0671204405686108e-06, |
|
"loss": 0.7867, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.40164196491241455, |
|
"learning_rate": 1.052793360403655e-06, |
|
"loss": 0.8015, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.436, |
|
"grad_norm": 0.3969416320323944, |
|
"learning_rate": 1.0385517832240472e-06, |
|
"loss": 0.7635, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.3942321836948395, |
|
"learning_rate": 1.0243960175257605e-06, |
|
"loss": 0.8132, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.444, |
|
"grad_norm": 0.4158990979194641, |
|
"learning_rate": 1.010326369945957e-06, |
|
"loss": 0.7571, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.448, |
|
"grad_norm": 0.46401944756507874, |
|
"learning_rate": 9.963431452563331e-07, |
|
"loss": 0.7628, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.452, |
|
"grad_norm": 0.425135999917984, |
|
"learning_rate": 9.824466463565246e-07, |
|
"loss": 0.787, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.456, |
|
"grad_norm": 0.45811745524406433, |
|
"learning_rate": 9.686371742675443e-07, |
|
"loss": 0.7998, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.41915279626846313, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 0.7981, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.464, |
|
"grad_norm": 0.4088100790977478, |
|
"learning_rate": 9.412805051739266e-07, |
|
"loss": 0.7642, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.468, |
|
"grad_norm": 0.4364009499549866, |
|
"learning_rate": 9.277339007597158e-07, |
|
"loss": 0.784, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.472, |
|
"grad_norm": 0.38804692029953003, |
|
"learning_rate": 9.142755083243577e-07, |
|
"loss": 0.7681, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.476, |
|
"grad_norm": 0.38228940963745117, |
|
"learning_rate": 9.009056193987569e-07, |
|
"loss": 0.7979, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.38130438327789307, |
|
"learning_rate": 8.876245235966884e-07, |
|
"loss": 0.7802, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.484, |
|
"grad_norm": 0.4364047944545746, |
|
"learning_rate": 8.744325086085248e-07, |
|
"loss": 0.7582, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.488, |
|
"grad_norm": 0.3930681049823761, |
|
"learning_rate": 8.613298601949971e-07, |
|
"loss": 0.7943, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.492, |
|
"grad_norm": 0.406017005443573, |
|
"learning_rate": 8.483168621810133e-07, |
|
"loss": 0.799, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.4273027181625366, |
|
"learning_rate": 8.353937964495029e-07, |
|
"loss": 0.747, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.4582826793193817, |
|
"learning_rate": 8.225609429353187e-07, |
|
"loss": 0.772, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.504, |
|
"grad_norm": 0.4531767666339874, |
|
"learning_rate": 8.098185796191632e-07, |
|
"loss": 0.7802, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.508, |
|
"grad_norm": 0.4155261814594269, |
|
"learning_rate": 7.971669825215789e-07, |
|
"loss": 0.7883, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.512, |
|
"grad_norm": 0.4580751657485962, |
|
"learning_rate": 7.846064256969571e-07, |
|
"loss": 0.7879, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.516, |
|
"grad_norm": 0.4104625880718231, |
|
"learning_rate": 7.72137181227608e-07, |
|
"loss": 0.797, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.40306803584098816, |
|
"learning_rate": 7.597595192178702e-07, |
|
"loss": 0.7693, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.524, |
|
"grad_norm": 0.44633400440216064, |
|
"learning_rate": 7.47473707788251e-07, |
|
"loss": 0.7791, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.528, |
|
"grad_norm": 0.40079960227012634, |
|
"learning_rate": 7.352800130696253e-07, |
|
"loss": 0.7548, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.532, |
|
"grad_norm": 0.41967469453811646, |
|
"learning_rate": 7.23178699197467e-07, |
|
"loss": 0.7598, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.536, |
|
"grad_norm": 0.3938508927822113, |
|
"learning_rate": 7.111700283061318e-07, |
|
"loss": 0.781, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.4104136824607849, |
|
"learning_rate": 6.992542605231739e-07, |
|
"loss": 0.7823, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.544, |
|
"grad_norm": 0.411960244178772, |
|
"learning_rate": 6.874316539637127e-07, |
|
"loss": 0.7728, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.548, |
|
"grad_norm": 0.41505640745162964, |
|
"learning_rate": 6.757024647248456e-07, |
|
"loss": 0.8157, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.552, |
|
"grad_norm": 0.38173797726631165, |
|
"learning_rate": 6.640669468800947e-07, |
|
"loss": 0.7474, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.556, |
|
"grad_norm": 0.42165470123291016, |
|
"learning_rate": 6.52525352473905e-07, |
|
"loss": 0.7731, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.4714242219924927, |
|
"learning_rate": 6.410779315161885e-07, |
|
"loss": 0.75, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.564, |
|
"grad_norm": 0.41340571641921997, |
|
"learning_rate": 6.297249319769016e-07, |
|
"loss": 0.755, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.568, |
|
"grad_norm": 0.43762728571891785, |
|
"learning_rate": 6.184665997806832e-07, |
|
"loss": 0.7378, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.572, |
|
"grad_norm": 0.41566336154937744, |
|
"learning_rate": 6.073031788015133e-07, |
|
"loss": 0.7588, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.576, |
|
"grad_norm": 0.3761170506477356, |
|
"learning_rate": 5.962349108574478e-07, |
|
"loss": 0.7837, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.4188023805618286, |
|
"learning_rate": 5.852620357053651e-07, |
|
"loss": 0.7922, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.584, |
|
"grad_norm": 0.39329683780670166, |
|
"learning_rate": 5.743847910357836e-07, |
|
"loss": 0.7545, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.588, |
|
"grad_norm": 0.3915480375289917, |
|
"learning_rate": 5.636034124677043e-07, |
|
"loss": 0.8032, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.592, |
|
"grad_norm": 0.4099782705307007, |
|
"learning_rate": 5.529181335435124e-07, |
|
"loss": 0.781, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.596, |
|
"grad_norm": 0.3995542824268341, |
|
"learning_rate": 5.423291857239177e-07, |
|
"loss": 0.7701, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.4187732934951782, |
|
"learning_rate": 5.318367983829393e-07, |
|
"loss": 0.7526, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.604, |
|
"grad_norm": 0.4345127046108246, |
|
"learning_rate": 5.214411988029355e-07, |
|
"loss": 0.7531, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.608, |
|
"grad_norm": 0.3876914381980896, |
|
"learning_rate": 5.111426121696866e-07, |
|
"loss": 0.7993, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.612, |
|
"grad_norm": 0.44101542234420776, |
|
"learning_rate": 5.009412615675102e-07, |
|
"loss": 0.8017, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.616, |
|
"grad_norm": 0.36027947068214417, |
|
"learning_rate": 4.908373679744316e-07, |
|
"loss": 0.7407, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.38491421937942505, |
|
"learning_rate": 4.808311502573976e-07, |
|
"loss": 0.8001, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.41484707593917847, |
|
"learning_rate": 4.709228251675357e-07, |
|
"loss": 0.8007, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.628, |
|
"grad_norm": 0.3889584541320801, |
|
"learning_rate": 4.6111260733545714e-07, |
|
"loss": 0.8044, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.632, |
|
"grad_norm": 0.3918435275554657, |
|
"learning_rate": 4.514007092666084e-07, |
|
"loss": 0.7936, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.636, |
|
"grad_norm": 0.4111655652523041, |
|
"learning_rate": 4.417873413366702e-07, |
|
"loss": 0.7905, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.4380398988723755, |
|
"learning_rate": 4.322727117869951e-07, |
|
"loss": 0.781, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.644, |
|
"grad_norm": 0.4324621856212616, |
|
"learning_rate": 4.228570267201049e-07, |
|
"loss": 0.822, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.648, |
|
"grad_norm": 0.409640371799469, |
|
"learning_rate": 4.1354049009521504e-07, |
|
"loss": 0.7982, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.652, |
|
"grad_norm": 0.4422895014286041, |
|
"learning_rate": 4.043233037238281e-07, |
|
"loss": 0.7581, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.656, |
|
"grad_norm": 0.39165276288986206, |
|
"learning_rate": 3.9520566726535367e-07, |
|
"loss": 0.8131, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.37336215376853943, |
|
"learning_rate": 3.8618777822278854e-07, |
|
"loss": 0.7423, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.664, |
|
"grad_norm": 0.46584251523017883, |
|
"learning_rate": 3.772698319384349e-07, |
|
"loss": 0.727, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.668, |
|
"grad_norm": 0.39146313071250916, |
|
"learning_rate": 3.684520215896703e-07, |
|
"loss": 0.7862, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.672, |
|
"grad_norm": 0.39932945370674133, |
|
"learning_rate": 3.597345381847656e-07, |
|
"loss": 0.7574, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.676, |
|
"grad_norm": 0.40844547748565674, |
|
"learning_rate": 3.511175705587433e-07, |
|
"loss": 0.7754, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.4052230715751648, |
|
"learning_rate": 3.426013053692878e-07, |
|
"loss": 0.7575, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.684, |
|
"grad_norm": 0.40574780106544495, |
|
"learning_rate": 3.341859270927067e-07, |
|
"loss": 0.7761, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.3910069465637207, |
|
"learning_rate": 3.258716180199278e-07, |
|
"loss": 0.7723, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.692, |
|
"grad_norm": 0.3847978115081787, |
|
"learning_rate": 3.1765855825255543e-07, |
|
"loss": 0.7849, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 2.6959999999999997, |
|
"grad_norm": 0.3743051290512085, |
|
"learning_rate": 3.0954692569896585e-07, |
|
"loss": 0.7825, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.43019863963127136, |
|
"learning_rate": 3.015368960704584e-07, |
|
"loss": 0.7791, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.7039999999999997, |
|
"grad_norm": 0.42280012369155884, |
|
"learning_rate": 2.9362864287744266e-07, |
|
"loss": 0.7682, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 2.708, |
|
"grad_norm": 0.4100990891456604, |
|
"learning_rate": 2.858223374256841e-07, |
|
"loss": 0.7816, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 2.7119999999999997, |
|
"grad_norm": 0.4392656981945038, |
|
"learning_rate": 2.7811814881259503e-07, |
|
"loss": 0.7645, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 2.716, |
|
"grad_norm": 0.37788864970207214, |
|
"learning_rate": 2.705162439235648e-07, |
|
"loss": 0.7793, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 2.7199999999999998, |
|
"grad_norm": 0.3875264823436737, |
|
"learning_rate": 2.63016787428354e-07, |
|
"loss": 0.7777, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.724, |
|
"grad_norm": 0.41191229224205017, |
|
"learning_rate": 2.556199417775174e-07, |
|
"loss": 0.8064, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 2.7279999999999998, |
|
"grad_norm": 0.3729294538497925, |
|
"learning_rate": 2.483258671988942e-07, |
|
"loss": 0.7664, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 2.732, |
|
"grad_norm": 0.39030614495277405, |
|
"learning_rate": 2.4113472169413176e-07, |
|
"loss": 0.768, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 2.7359999999999998, |
|
"grad_norm": 0.3516885042190552, |
|
"learning_rate": 2.3404666103526542e-07, |
|
"loss": 0.7609, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.38836079835891724, |
|
"learning_rate": 2.2706183876134047e-07, |
|
"loss": 0.8086, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 2.7439999999999998, |
|
"grad_norm": 0.40275564789772034, |
|
"learning_rate": 2.2018040617509174e-07, |
|
"loss": 0.8176, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 2.748, |
|
"grad_norm": 0.3835076689720154, |
|
"learning_rate": 2.134025123396638e-07, |
|
"loss": 0.7824, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.3846163749694824, |
|
"learning_rate": 2.0672830407537925e-07, |
|
"loss": 0.775, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 2.7560000000000002, |
|
"grad_norm": 0.4121825397014618, |
|
"learning_rate": 2.0015792595656225e-07, |
|
"loss": 0.8086, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.39317595958709717, |
|
"learning_rate": 1.9369152030840553e-07, |
|
"loss": 0.7897, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.7640000000000002, |
|
"grad_norm": 0.4010584354400635, |
|
"learning_rate": 1.873292272038868e-07, |
|
"loss": 0.7258, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 2.768, |
|
"grad_norm": 0.39328062534332275, |
|
"learning_rate": 1.8107118446073492e-07, |
|
"loss": 0.7727, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 2.7720000000000002, |
|
"grad_norm": 0.4069308042526245, |
|
"learning_rate": 1.7491752763844294e-07, |
|
"loss": 0.7364, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 2.776, |
|
"grad_norm": 0.4272211790084839, |
|
"learning_rate": 1.688683900353366e-07, |
|
"loss": 0.769, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 2.7800000000000002, |
|
"grad_norm": 0.3933575749397278, |
|
"learning_rate": 1.6292390268568103e-07, |
|
"loss": 0.8167, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.784, |
|
"grad_norm": 0.416881799697876, |
|
"learning_rate": 1.5708419435684463e-07, |
|
"loss": 0.7854, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 2.7880000000000003, |
|
"grad_norm": 0.3927464187145233, |
|
"learning_rate": 1.5134939154651196e-07, |
|
"loss": 0.7785, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 2.792, |
|
"grad_norm": 0.3979874551296234, |
|
"learning_rate": 1.4571961847993977e-07, |
|
"loss": 0.782, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 2.7960000000000003, |
|
"grad_norm": 0.4113554060459137, |
|
"learning_rate": 1.4019499710726913e-07, |
|
"loss": 0.776, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.40728065371513367, |
|
"learning_rate": 1.3477564710088097e-07, |
|
"loss": 0.7813, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.8040000000000003, |
|
"grad_norm": 0.37924978137016296, |
|
"learning_rate": 1.294616858528064e-07, |
|
"loss": 0.7608, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 2.808, |
|
"grad_norm": 0.4243476986885071, |
|
"learning_rate": 1.2425322847218368e-07, |
|
"loss": 0.7679, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 2.8120000000000003, |
|
"grad_norm": 0.385475754737854, |
|
"learning_rate": 1.1915038778276212e-07, |
|
"loss": 0.7511, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.40648695826530457, |
|
"learning_rate": 1.1415327432046041e-07, |
|
"loss": 0.7406, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.39061108231544495, |
|
"learning_rate": 1.0926199633097156e-07, |
|
"loss": 0.8138, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.824, |
|
"grad_norm": 0.4108492434024811, |
|
"learning_rate": 1.044766597674196e-07, |
|
"loss": 0.7478, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 2.828, |
|
"grad_norm": 0.3769419193267822, |
|
"learning_rate": 9.979736828806096e-08, |
|
"loss": 0.7825, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 2.832, |
|
"grad_norm": 0.39844822883605957, |
|
"learning_rate": 9.522422325404234e-08, |
|
"loss": 0.8049, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 2.836, |
|
"grad_norm": 0.39169108867645264, |
|
"learning_rate": 9.075732372720414e-08, |
|
"loss": 0.7697, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.3846679925918579, |
|
"learning_rate": 8.639676646793382e-08, |
|
"loss": 0.7875, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.844, |
|
"grad_norm": 0.3852086365222931, |
|
"learning_rate": 8.214264593307097e-08, |
|
"loss": 0.7587, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.848, |
|
"grad_norm": 0.43485692143440247, |
|
"learning_rate": 7.799505427386001e-08, |
|
"loss": 0.7964, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.852, |
|
"grad_norm": 0.3984343111515045, |
|
"learning_rate": 7.395408133395509e-08, |
|
"loss": 0.7781, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 2.856, |
|
"grad_norm": 0.4015841782093048, |
|
"learning_rate": 7.001981464747565e-08, |
|
"loss": 0.7413, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.3932044804096222, |
|
"learning_rate": 6.61923394371039e-08, |
|
"loss": 0.7592, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.864, |
|
"grad_norm": 0.38348260521888733, |
|
"learning_rate": 6.247173861224753e-08, |
|
"loss": 0.7833, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.868, |
|
"grad_norm": 0.39616912603378296, |
|
"learning_rate": 5.8858092767236084e-08, |
|
"loss": 0.8241, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.872, |
|
"grad_norm": 0.39276373386383057, |
|
"learning_rate": 5.535148017958014e-08, |
|
"loss": 0.775, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.876, |
|
"grad_norm": 0.3859685957431793, |
|
"learning_rate": 5.19519768082738e-08, |
|
"loss": 0.7998, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.3932885229587555, |
|
"learning_rate": 4.865965629214819e-08, |
|
"loss": 0.728, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.884, |
|
"grad_norm": 0.3808436989784241, |
|
"learning_rate": 4.5474589948280026e-08, |
|
"loss": 0.7968, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.888, |
|
"grad_norm": 0.39818060398101807, |
|
"learning_rate": 4.2396846770441644e-08, |
|
"loss": 0.798, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.892, |
|
"grad_norm": 0.41582825779914856, |
|
"learning_rate": 3.9426493427611177e-08, |
|
"loss": 0.7847, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.896, |
|
"grad_norm": 0.38983744382858276, |
|
"learning_rate": 3.65635942625242e-08, |
|
"loss": 0.8021, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.38485726714134216, |
|
"learning_rate": 3.3808211290284886e-08, |
|
"loss": 0.8147, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.904, |
|
"grad_norm": 0.39613547921180725, |
|
"learning_rate": 3.1160404197018155e-08, |
|
"loss": 0.794, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.908, |
|
"grad_norm": 0.40311679244041443, |
|
"learning_rate": 2.8620230338578526e-08, |
|
"loss": 0.7908, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.912, |
|
"grad_norm": 0.3574402630329132, |
|
"learning_rate": 2.6187744739308297e-08, |
|
"loss": 0.8137, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.916, |
|
"grad_norm": 0.3904540538787842, |
|
"learning_rate": 2.386300009084408e-08, |
|
"loss": 0.764, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.3986371159553528, |
|
"learning_rate": 2.1646046750978255e-08, |
|
"loss": 0.7881, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.924, |
|
"grad_norm": 0.39145928621292114, |
|
"learning_rate": 1.953693274256374e-08, |
|
"loss": 0.764, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.928, |
|
"grad_norm": 0.38288819789886475, |
|
"learning_rate": 1.753570375247815e-08, |
|
"loss": 0.7774, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.932, |
|
"grad_norm": 0.3810698986053467, |
|
"learning_rate": 1.5642403130632367e-08, |
|
"loss": 0.7491, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.936, |
|
"grad_norm": 0.3975498080253601, |
|
"learning_rate": 1.3857071889029073e-08, |
|
"loss": 0.7755, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.38917872309684753, |
|
"learning_rate": 1.2179748700879013e-08, |
|
"loss": 0.7924, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.38839098811149597, |
|
"learning_rate": 1.0610469899760001e-08, |
|
"loss": 0.8068, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.948, |
|
"grad_norm": 0.38586539030075073, |
|
"learning_rate": 9.14926947883088e-09, |
|
"loss": 0.7816, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.952, |
|
"grad_norm": 0.4118897020816803, |
|
"learning_rate": 7.796179090094891e-09, |
|
"loss": 0.7837, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.956, |
|
"grad_norm": 0.36527642607688904, |
|
"learning_rate": 6.551228043715218e-09, |
|
"loss": 0.8002, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.3902311623096466, |
|
"learning_rate": 5.414443307377171e-09, |
|
"loss": 0.7759, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.964, |
|
"grad_norm": 0.37341415882110596, |
|
"learning_rate": 4.385849505708084e-09, |
|
"loss": 0.7821, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.968, |
|
"grad_norm": 0.40262770652770996, |
|
"learning_rate": 3.4654689197405335e-09, |
|
"loss": 0.7754, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.972, |
|
"grad_norm": 0.37969642877578735, |
|
"learning_rate": 2.6533214864310485e-09, |
|
"loss": 0.7755, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.976, |
|
"grad_norm": 0.3812060058116913, |
|
"learning_rate": 1.9494247982282386e-09, |
|
"loss": 0.7779, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.3999031186103821, |
|
"learning_rate": 1.3537941026914302e-09, |
|
"loss": 0.7995, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.984, |
|
"grad_norm": 0.36233609914779663, |
|
"learning_rate": 8.664423021614854e-10, |
|
"loss": 0.7679, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.988, |
|
"grad_norm": 0.4313134253025055, |
|
"learning_rate": 4.87379953478806e-10, |
|
"loss": 0.8057, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.992, |
|
"grad_norm": 0.3804738223552704, |
|
"learning_rate": 2.1661526775795804e-10, |
|
"loss": 0.7728, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.996, |
|
"grad_norm": 0.410442054271698, |
|
"learning_rate": 5.4154110206150465e-11, |
|
"loss": 0.7773, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.37331917881965637, |
|
"learning_rate": 0.0, |
|
"loss": 0.7804, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 750, |
|
"total_flos": 750914578743296.0, |
|
"train_loss": 0.842607304652532, |
|
"train_runtime": 42164.2983, |
|
"train_samples_per_second": 1.708, |
|
"train_steps_per_second": 0.018 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 750, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 750914578743296.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|