|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 375, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.008, |
|
"grad_norm": 6.951127529144287, |
|
"learning_rate": 2.6315789473684213e-07, |
|
"loss": 1.2947, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.016, |
|
"grad_norm": 6.943446636199951, |
|
"learning_rate": 5.263157894736843e-07, |
|
"loss": 1.2776, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.024, |
|
"grad_norm": 6.9143147468566895, |
|
"learning_rate": 7.894736842105263e-07, |
|
"loss": 1.272, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.032, |
|
"grad_norm": 6.79904317855835, |
|
"learning_rate": 1.0526315789473685e-06, |
|
"loss": 1.2738, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.621460437774658, |
|
"learning_rate": 1.3157894736842106e-06, |
|
"loss": 1.2432, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.048, |
|
"grad_norm": 6.126567840576172, |
|
"learning_rate": 1.5789473684210526e-06, |
|
"loss": 1.2115, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.056, |
|
"grad_norm": 5.147610187530518, |
|
"learning_rate": 1.8421052631578948e-06, |
|
"loss": 1.2493, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 4.801726818084717, |
|
"learning_rate": 2.105263157894737e-06, |
|
"loss": 1.2107, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.072, |
|
"grad_norm": 4.432796001434326, |
|
"learning_rate": 2.368421052631579e-06, |
|
"loss": 1.1829, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.735476016998291, |
|
"learning_rate": 2.631578947368421e-06, |
|
"loss": 1.1258, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.088, |
|
"grad_norm": 2.625633716583252, |
|
"learning_rate": 2.8947368421052634e-06, |
|
"loss": 1.1722, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.096, |
|
"grad_norm": 2.6213021278381348, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 1.1329, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.104, |
|
"grad_norm": 4.202518463134766, |
|
"learning_rate": 3.421052631578948e-06, |
|
"loss": 1.1283, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.112, |
|
"grad_norm": 4.3722615242004395, |
|
"learning_rate": 3.6842105263157896e-06, |
|
"loss": 1.1362, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.284238338470459, |
|
"learning_rate": 3.947368421052632e-06, |
|
"loss": 1.122, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 3.9620132446289062, |
|
"learning_rate": 4.210526315789474e-06, |
|
"loss": 1.098, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.136, |
|
"grad_norm": 2.9353275299072266, |
|
"learning_rate": 4.473684210526316e-06, |
|
"loss": 1.0722, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.144, |
|
"grad_norm": 2.8066329956054688, |
|
"learning_rate": 4.736842105263158e-06, |
|
"loss": 1.0788, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.152, |
|
"grad_norm": 2.426243543624878, |
|
"learning_rate": 5e-06, |
|
"loss": 1.0823, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.9383955001831055, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 1.0643, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.168, |
|
"grad_norm": 1.5883516073226929, |
|
"learning_rate": 5.526315789473685e-06, |
|
"loss": 1.0186, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.176, |
|
"grad_norm": 1.5131279230117798, |
|
"learning_rate": 5.789473684210527e-06, |
|
"loss": 1.0316, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.184, |
|
"grad_norm": 1.5010865926742554, |
|
"learning_rate": 6.0526315789473685e-06, |
|
"loss": 1.0223, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 1.1548634767532349, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 0.9821, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.17434561252594, |
|
"learning_rate": 6.578947368421054e-06, |
|
"loss": 0.9551, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.208, |
|
"grad_norm": 1.1539326906204224, |
|
"learning_rate": 6.842105263157896e-06, |
|
"loss": 0.9725, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.216, |
|
"grad_norm": 1.1245882511138916, |
|
"learning_rate": 7.1052631578947375e-06, |
|
"loss": 0.9684, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.224, |
|
"grad_norm": 1.1288598775863647, |
|
"learning_rate": 7.368421052631579e-06, |
|
"loss": 0.9963, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.232, |
|
"grad_norm": 1.0191954374313354, |
|
"learning_rate": 7.631578947368423e-06, |
|
"loss": 1.0004, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.9676952362060547, |
|
"learning_rate": 7.894736842105265e-06, |
|
"loss": 0.9658, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.248, |
|
"grad_norm": 1.1406854391098022, |
|
"learning_rate": 8.157894736842106e-06, |
|
"loss": 0.9879, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 0.9583864212036133, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 0.9472, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.264, |
|
"grad_norm": 0.9609050750732422, |
|
"learning_rate": 8.68421052631579e-06, |
|
"loss": 0.978, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.272, |
|
"grad_norm": 0.8762646913528442, |
|
"learning_rate": 8.947368421052632e-06, |
|
"loss": 0.9468, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.8770004510879517, |
|
"learning_rate": 9.210526315789474e-06, |
|
"loss": 0.9643, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.288, |
|
"grad_norm": 0.8456284403800964, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 0.9534, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.296, |
|
"grad_norm": 0.9351610541343689, |
|
"learning_rate": 9.736842105263159e-06, |
|
"loss": 0.9389, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.304, |
|
"grad_norm": 0.8555827736854553, |
|
"learning_rate": 1e-05, |
|
"loss": 0.9065, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.312, |
|
"grad_norm": 0.8321928977966309, |
|
"learning_rate": 9.99978274148479e-06, |
|
"loss": 0.9469, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.7721774578094482, |
|
"learning_rate": 9.999130984819662e-06, |
|
"loss": 0.9243, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.328, |
|
"grad_norm": 0.7928785085678101, |
|
"learning_rate": 9.998044786644492e-06, |
|
"loss": 0.9615, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.336, |
|
"grad_norm": 0.6871277093887329, |
|
"learning_rate": 9.9965242413536e-06, |
|
"loss": 0.9437, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.344, |
|
"grad_norm": 0.7224629521369934, |
|
"learning_rate": 9.994569481087552e-06, |
|
"loss": 0.9486, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.352, |
|
"grad_norm": 0.8585007190704346, |
|
"learning_rate": 9.992180675721671e-06, |
|
"loss": 0.9382, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.7822074294090271, |
|
"learning_rate": 9.989358032851283e-06, |
|
"loss": 0.9179, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.368, |
|
"grad_norm": 0.7207604646682739, |
|
"learning_rate": 9.986101797773667e-06, |
|
"loss": 0.9395, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.376, |
|
"grad_norm": 0.889811635017395, |
|
"learning_rate": 9.98241225346674e-06, |
|
"loss": 0.886, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.8683562874794006, |
|
"learning_rate": 9.978289720564471e-06, |
|
"loss": 0.8996, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.392, |
|
"grad_norm": 0.7052727341651917, |
|
"learning_rate": 9.97373455732901e-06, |
|
"loss": 0.9052, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.8796941041946411, |
|
"learning_rate": 9.968747159619556e-06, |
|
"loss": 0.9217, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.408, |
|
"grad_norm": 0.7054159641265869, |
|
"learning_rate": 9.963327960857962e-06, |
|
"loss": 0.9534, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.416, |
|
"grad_norm": 0.7010794281959534, |
|
"learning_rate": 9.957477431991053e-06, |
|
"loss": 0.904, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.424, |
|
"grad_norm": 0.7052696347236633, |
|
"learning_rate": 9.95119608144972e-06, |
|
"loss": 0.9093, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.432, |
|
"grad_norm": 0.6790952086448669, |
|
"learning_rate": 9.944484455104716e-06, |
|
"loss": 0.9236, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.6545783877372742, |
|
"learning_rate": 9.937343136219234e-06, |
|
"loss": 0.9295, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 0.6248077154159546, |
|
"learning_rate": 9.929772745398207e-06, |
|
"loss": 0.9171, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.456, |
|
"grad_norm": 0.6094949245452881, |
|
"learning_rate": 9.921773940534382e-06, |
|
"loss": 0.8842, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.464, |
|
"grad_norm": 0.7010917663574219, |
|
"learning_rate": 9.913347416751148e-06, |
|
"loss": 0.9079, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.472, |
|
"grad_norm": 0.7240394353866577, |
|
"learning_rate": 9.904493906342124e-06, |
|
"loss": 0.9433, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.71834397315979, |
|
"learning_rate": 9.895214178707516e-06, |
|
"loss": 0.9086, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.488, |
|
"grad_norm": 0.7287778854370117, |
|
"learning_rate": 9.885509040287267e-06, |
|
"loss": 0.8941, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.496, |
|
"grad_norm": 0.6156062483787537, |
|
"learning_rate": 9.875379334490962e-06, |
|
"loss": 0.8547, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.504, |
|
"grad_norm": 0.6256719827651978, |
|
"learning_rate": 9.864825941624538e-06, |
|
"loss": 0.8925, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 0.6274106502532959, |
|
"learning_rate": 9.853849778813777e-06, |
|
"loss": 0.9073, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.6543683409690857, |
|
"learning_rate": 9.842451799924616e-06, |
|
"loss": 0.9398, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.528, |
|
"grad_norm": 0.6689018607139587, |
|
"learning_rate": 9.830632995480243e-06, |
|
"loss": 0.9149, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.536, |
|
"grad_norm": 0.6306348443031311, |
|
"learning_rate": 9.818394392575018e-06, |
|
"loss": 0.8812, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.544, |
|
"grad_norm": 0.7672179937362671, |
|
"learning_rate": 9.805737054785223e-06, |
|
"loss": 0.9074, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.552, |
|
"grad_norm": 0.7313401103019714, |
|
"learning_rate": 9.792662082076618e-06, |
|
"loss": 0.8983, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.6449005007743835, |
|
"learning_rate": 9.779170610708872e-06, |
|
"loss": 0.95, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.568, |
|
"grad_norm": 0.7076218128204346, |
|
"learning_rate": 9.765263813136796e-06, |
|
"loss": 0.8961, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 0.6033435463905334, |
|
"learning_rate": 9.750942897908468e-06, |
|
"loss": 0.8845, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.584, |
|
"grad_norm": 0.6751191020011902, |
|
"learning_rate": 9.736209109560201e-06, |
|
"loss": 0.9402, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.592, |
|
"grad_norm": 0.6485176682472229, |
|
"learning_rate": 9.721063728508384e-06, |
|
"loss": 0.8936, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.6805006265640259, |
|
"learning_rate": 9.705508070938219e-06, |
|
"loss": 0.9238, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.608, |
|
"grad_norm": 0.7293475270271301, |
|
"learning_rate": 9.689543488689332e-06, |
|
"loss": 0.8917, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.616, |
|
"grad_norm": 0.6713281869888306, |
|
"learning_rate": 9.673171369138297e-06, |
|
"loss": 0.9051, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.624, |
|
"grad_norm": 0.7109168171882629, |
|
"learning_rate": 9.656393135078067e-06, |
|
"loss": 0.9147, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.632, |
|
"grad_norm": 0.7239603400230408, |
|
"learning_rate": 9.639210244594335e-06, |
|
"loss": 0.878, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6019280552864075, |
|
"learning_rate": 9.621624190938802e-06, |
|
"loss": 0.8919, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.648, |
|
"grad_norm": 0.7745220065116882, |
|
"learning_rate": 9.603636502399436e-06, |
|
"loss": 0.8745, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.656, |
|
"grad_norm": 0.8410776853561401, |
|
"learning_rate": 9.585248742167638e-06, |
|
"loss": 0.9138, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.664, |
|
"grad_norm": 0.5263102054595947, |
|
"learning_rate": 9.566462508202403e-06, |
|
"loss": 0.8924, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.672, |
|
"grad_norm": 0.9095488786697388, |
|
"learning_rate": 9.547279433091446e-06, |
|
"loss": 0.9034, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6388502717018127, |
|
"learning_rate": 9.527701183909336e-06, |
|
"loss": 0.9433, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.688, |
|
"grad_norm": 0.5810441970825195, |
|
"learning_rate": 9.507729462072615e-06, |
|
"loss": 0.9063, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.696, |
|
"grad_norm": 0.6178363561630249, |
|
"learning_rate": 9.48736600319193e-06, |
|
"loss": 0.8911, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.6862829923629761, |
|
"learning_rate": 9.466612576921223e-06, |
|
"loss": 0.8723, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.712, |
|
"grad_norm": 0.6216509938240051, |
|
"learning_rate": 9.445470986803922e-06, |
|
"loss": 0.9128, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6560526490211487, |
|
"learning_rate": 9.423943070116219e-06, |
|
"loss": 0.8976, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.728, |
|
"grad_norm": 0.6992607116699219, |
|
"learning_rate": 9.402030697707398e-06, |
|
"loss": 0.9017, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.736, |
|
"grad_norm": 0.6775327920913696, |
|
"learning_rate": 9.37973577383726e-06, |
|
"loss": 0.9186, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.744, |
|
"grad_norm": 0.5652820467948914, |
|
"learning_rate": 9.357060236010626e-06, |
|
"loss": 0.9105, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.752, |
|
"grad_norm": 0.6999420523643494, |
|
"learning_rate": 9.334006054808966e-06, |
|
"loss": 0.9206, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6290011405944824, |
|
"learning_rate": 9.310575233719155e-06, |
|
"loss": 0.9198, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 0.7411805987358093, |
|
"learning_rate": 9.28676980895935e-06, |
|
"loss": 0.8981, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.776, |
|
"grad_norm": 0.600430965423584, |
|
"learning_rate": 9.262591849302049e-06, |
|
"loss": 0.8867, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.784, |
|
"grad_norm": 0.6421351432800293, |
|
"learning_rate": 9.238043455894294e-06, |
|
"loss": 0.9053, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.792, |
|
"grad_norm": 0.7420929670333862, |
|
"learning_rate": 9.213126762075088e-06, |
|
"loss": 0.8963, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5919941663742065, |
|
"learning_rate": 9.187843933189994e-06, |
|
"loss": 0.8234, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.808, |
|
"grad_norm": 0.7523407340049744, |
|
"learning_rate": 9.162197166402957e-06, |
|
"loss": 0.9065, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.816, |
|
"grad_norm": 0.6169841885566711, |
|
"learning_rate": 9.136188690505363e-06, |
|
"loss": 0.9195, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.824, |
|
"grad_norm": 0.6386623382568359, |
|
"learning_rate": 9.109820765722357e-06, |
|
"loss": 0.8867, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 0.5823110342025757, |
|
"learning_rate": 9.083095683516414e-06, |
|
"loss": 0.9039, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5956842303276062, |
|
"learning_rate": 9.056015766388205e-06, |
|
"loss": 0.9004, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.848, |
|
"grad_norm": 0.5407257676124573, |
|
"learning_rate": 9.028583367674767e-06, |
|
"loss": 0.8644, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.856, |
|
"grad_norm": 0.6211457252502441, |
|
"learning_rate": 9.00080087134498e-06, |
|
"loss": 0.9033, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.864, |
|
"grad_norm": 0.5631619095802307, |
|
"learning_rate": 8.972670691792409e-06, |
|
"loss": 0.846, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.872, |
|
"grad_norm": 0.5860468745231628, |
|
"learning_rate": 8.944195273625472e-06, |
|
"loss": 0.9196, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5991073846817017, |
|
"learning_rate": 8.915377091454992e-06, |
|
"loss": 0.8949, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.888, |
|
"grad_norm": 0.5442657470703125, |
|
"learning_rate": 8.886218649679162e-06, |
|
"loss": 0.8848, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 0.49064090847969055, |
|
"learning_rate": 8.856722482265886e-06, |
|
"loss": 0.8571, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.904, |
|
"grad_norm": 0.6109375953674316, |
|
"learning_rate": 8.826891152532579e-06, |
|
"loss": 0.874, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.912, |
|
"grad_norm": 0.5844782590866089, |
|
"learning_rate": 8.796727252923403e-06, |
|
"loss": 0.8715, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5409021377563477, |
|
"learning_rate": 8.766233404783975e-06, |
|
"loss": 0.8836, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.928, |
|
"grad_norm": 0.5358095169067383, |
|
"learning_rate": 8.735412258133562e-06, |
|
"loss": 0.8686, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.936, |
|
"grad_norm": 0.6133047938346863, |
|
"learning_rate": 8.704266491434787e-06, |
|
"loss": 0.8661, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.944, |
|
"grad_norm": 0.6640852093696594, |
|
"learning_rate": 8.672798811360863e-06, |
|
"loss": 0.8357, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.952, |
|
"grad_norm": 0.6169326901435852, |
|
"learning_rate": 8.641011952560372e-06, |
|
"loss": 0.8542, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.6841595768928528, |
|
"learning_rate": 8.608908677419606e-06, |
|
"loss": 0.898, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.968, |
|
"grad_norm": 0.6243302822113037, |
|
"learning_rate": 8.576491775822527e-06, |
|
"loss": 0.8579, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.976, |
|
"grad_norm": 0.5553723573684692, |
|
"learning_rate": 8.543764064908295e-06, |
|
"loss": 0.8628, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.984, |
|
"grad_norm": 0.6372663974761963, |
|
"learning_rate": 8.510728388826464e-06, |
|
"loss": 0.8672, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.992, |
|
"grad_norm": 0.6535351872444153, |
|
"learning_rate": 8.477387618489808e-06, |
|
"loss": 0.849, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.6568986773490906, |
|
"learning_rate": 8.443744651324828e-06, |
|
"loss": 0.8512, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.008, |
|
"grad_norm": 0.6753949522972107, |
|
"learning_rate": 8.409802411019962e-06, |
|
"loss": 0.8784, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.016, |
|
"grad_norm": 0.7170552015304565, |
|
"learning_rate": 8.375563847271506e-06, |
|
"loss": 0.8431, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 0.6562941074371338, |
|
"learning_rate": 8.341031935527267e-06, |
|
"loss": 0.8489, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.032, |
|
"grad_norm": 0.6256612539291382, |
|
"learning_rate": 8.306209676727994e-06, |
|
"loss": 0.8586, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.7772700190544128, |
|
"learning_rate": 8.271100097046585e-06, |
|
"loss": 0.8498, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.048, |
|
"grad_norm": 0.6408899426460266, |
|
"learning_rate": 8.235706247625098e-06, |
|
"loss": 0.8249, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.056, |
|
"grad_norm": 0.5903573632240295, |
|
"learning_rate": 8.200031204309604e-06, |
|
"loss": 0.8051, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.064, |
|
"grad_norm": 0.7947317957878113, |
|
"learning_rate": 8.16407806738288e-06, |
|
"loss": 0.8263, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.072, |
|
"grad_norm": 0.6758819818496704, |
|
"learning_rate": 8.127849961294984e-06, |
|
"loss": 0.801, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.682586669921875, |
|
"learning_rate": 8.091350034391732e-06, |
|
"loss": 0.8507, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 0.7249560356140137, |
|
"learning_rate": 8.05458145864109e-06, |
|
"loss": 0.836, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.096, |
|
"grad_norm": 0.6341940760612488, |
|
"learning_rate": 8.017547429357532e-06, |
|
"loss": 0.846, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.104, |
|
"grad_norm": 0.7217475175857544, |
|
"learning_rate": 7.980251164924342e-06, |
|
"loss": 0.8489, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.112, |
|
"grad_norm": 0.5944041609764099, |
|
"learning_rate": 7.94269590651393e-06, |
|
"loss": 0.8149, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.6496677994728088, |
|
"learning_rate": 7.904884917806174e-06, |
|
"loss": 0.8401, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.1280000000000001, |
|
"grad_norm": 0.604494571685791, |
|
"learning_rate": 7.866821484704777e-06, |
|
"loss": 0.8379, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.1360000000000001, |
|
"grad_norm": 0.5416861772537231, |
|
"learning_rate": 7.828508915051724e-06, |
|
"loss": 0.817, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.144, |
|
"grad_norm": 0.5813770890235901, |
|
"learning_rate": 7.789950538339813e-06, |
|
"loss": 0.8452, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.5333764553070068, |
|
"learning_rate": 7.751149705423313e-06, |
|
"loss": 0.8404, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.5556795597076416, |
|
"learning_rate": 7.712109788226763e-06, |
|
"loss": 0.8142, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.168, |
|
"grad_norm": 0.6207058429718018, |
|
"learning_rate": 7.672834179451943e-06, |
|
"loss": 0.8746, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.176, |
|
"grad_norm": 0.5369215607643127, |
|
"learning_rate": 7.633326292283028e-06, |
|
"loss": 0.8469, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.184, |
|
"grad_norm": 0.5977884531021118, |
|
"learning_rate": 7.593589560089984e-06, |
|
"loss": 0.8328, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.192, |
|
"grad_norm": 0.6059456467628479, |
|
"learning_rate": 7.553627436130183e-06, |
|
"loss": 0.8582, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5836285948753357, |
|
"learning_rate": 7.513443393248312e-06, |
|
"loss": 0.8558, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.208, |
|
"grad_norm": 0.6144264936447144, |
|
"learning_rate": 7.473040923574567e-06, |
|
"loss": 0.8549, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 0.626693844795227, |
|
"learning_rate": 7.432423538221179e-06, |
|
"loss": 0.8473, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.224, |
|
"grad_norm": 0.6432238221168518, |
|
"learning_rate": 7.391594766977277e-06, |
|
"loss": 0.8281, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.232, |
|
"grad_norm": 0.6119612455368042, |
|
"learning_rate": 7.350558158002154e-06, |
|
"loss": 0.8419, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.6895710229873657, |
|
"learning_rate": 7.3093172775169e-06, |
|
"loss": 0.8408, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.248, |
|
"grad_norm": 0.6269011497497559, |
|
"learning_rate": 7.2678757094945e-06, |
|
"loss": 0.8345, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.256, |
|
"grad_norm": 0.5723068118095398, |
|
"learning_rate": 7.226237055348369e-06, |
|
"loss": 0.8654, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.264, |
|
"grad_norm": 0.6293651461601257, |
|
"learning_rate": 7.184404933619377e-06, |
|
"loss": 0.8387, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.272, |
|
"grad_norm": 0.6416763067245483, |
|
"learning_rate": 7.142382979661386e-06, |
|
"loss": 0.8317, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5021880865097046, |
|
"learning_rate": 7.100174845325327e-06, |
|
"loss": 0.8281, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.288, |
|
"grad_norm": 0.567487895488739, |
|
"learning_rate": 7.057784198641835e-06, |
|
"loss": 0.8646, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.296, |
|
"grad_norm": 0.5684318542480469, |
|
"learning_rate": 7.015214723502496e-06, |
|
"loss": 0.8293, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.304, |
|
"grad_norm": 0.5071728825569153, |
|
"learning_rate": 6.972470119339692e-06, |
|
"loss": 0.8097, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.312, |
|
"grad_norm": 0.6408015489578247, |
|
"learning_rate": 6.929554100805118e-06, |
|
"loss": 0.85, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5955577492713928, |
|
"learning_rate": 6.886470397446958e-06, |
|
"loss": 0.8343, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.328, |
|
"grad_norm": 0.4974721670150757, |
|
"learning_rate": 6.843222753385785e-06, |
|
"loss": 0.7749, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.336, |
|
"grad_norm": 0.6704075336456299, |
|
"learning_rate": 6.799814926989171e-06, |
|
"loss": 0.8165, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.5451616644859314, |
|
"learning_rate": 6.756250690545079e-06, |
|
"loss": 0.8349, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.3519999999999999, |
|
"grad_norm": 0.5713831782341003, |
|
"learning_rate": 6.712533829934042e-06, |
|
"loss": 0.8224, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.3599999999999999, |
|
"grad_norm": 0.5288674831390381, |
|
"learning_rate": 6.6686681443001485e-06, |
|
"loss": 0.8518, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.3679999999999999, |
|
"grad_norm": 0.6580489873886108, |
|
"learning_rate": 6.62465744572089e-06, |
|
"loss": 0.8736, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.376, |
|
"grad_norm": 0.5576747059822083, |
|
"learning_rate": 6.580505558875878e-06, |
|
"loss": 0.8171, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.384, |
|
"grad_norm": 0.6031728386878967, |
|
"learning_rate": 6.536216320714466e-06, |
|
"loss": 0.8325, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.392, |
|
"grad_norm": 0.49704813957214355, |
|
"learning_rate": 6.491793580122301e-06, |
|
"loss": 0.8348, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.5445089340209961, |
|
"learning_rate": 6.447241197586847e-06, |
|
"loss": 0.8679, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 0.5517768263816833, |
|
"learning_rate": 6.402563044861899e-06, |
|
"loss": 0.8414, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.416, |
|
"grad_norm": 0.5023953318595886, |
|
"learning_rate": 6.357763004631104e-06, |
|
"loss": 0.7706, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.424, |
|
"grad_norm": 0.5183501243591309, |
|
"learning_rate": 6.312844970170551e-06, |
|
"loss": 0.8145, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.432, |
|
"grad_norm": 0.5883236527442932, |
|
"learning_rate": 6.267812845010431e-06, |
|
"loss": 0.8071, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5993454456329346, |
|
"learning_rate": 6.2226705425958e-06, |
|
"loss": 0.8338, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.448, |
|
"grad_norm": 0.5319005250930786, |
|
"learning_rate": 6.177421985946499e-06, |
|
"loss": 0.856, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.456, |
|
"grad_norm": 0.6088922619819641, |
|
"learning_rate": 6.132071107316221e-06, |
|
"loss": 0.8405, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.464, |
|
"grad_norm": 0.565377950668335, |
|
"learning_rate": 6.0866218478507875e-06, |
|
"loss": 0.848, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 0.5202862024307251, |
|
"learning_rate": 6.041078157245649e-06, |
|
"loss": 0.8139, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5979906320571899, |
|
"learning_rate": 5.995443993402647e-06, |
|
"loss": 0.8267, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.488, |
|
"grad_norm": 0.5841382145881653, |
|
"learning_rate": 5.949723322086053e-06, |
|
"loss": 0.8011, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.496, |
|
"grad_norm": 0.5658543705940247, |
|
"learning_rate": 5.9039201165779315e-06, |
|
"loss": 0.786, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.504, |
|
"grad_norm": 0.5966721773147583, |
|
"learning_rate": 5.858038357332851e-06, |
|
"loss": 0.8083, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.512, |
|
"grad_norm": 0.5970141291618347, |
|
"learning_rate": 5.812082031631966e-06, |
|
"loss": 0.8421, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.5003347992897034, |
|
"learning_rate": 5.766055133236513e-06, |
|
"loss": 0.823, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.528, |
|
"grad_norm": 0.5702365040779114, |
|
"learning_rate": 5.7199616620407325e-06, |
|
"loss": 0.8309, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.6028835773468018, |
|
"learning_rate": 5.673805623724272e-06, |
|
"loss": 0.8088, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.544, |
|
"grad_norm": 0.5072699189186096, |
|
"learning_rate": 5.627591029404072e-06, |
|
"loss": 0.8199, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.552, |
|
"grad_norm": 0.5302557945251465, |
|
"learning_rate": 5.581321895285787e-06, |
|
"loss": 0.8194, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.48366543650627136, |
|
"learning_rate": 5.535002242314772e-06, |
|
"loss": 0.8176, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.568, |
|
"grad_norm": 0.6495940089225769, |
|
"learning_rate": 5.488636095826636e-06, |
|
"loss": 0.8078, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.576, |
|
"grad_norm": 0.4819113314151764, |
|
"learning_rate": 5.4422274851974356e-06, |
|
"loss": 0.8397, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.584, |
|
"grad_norm": 0.5706694722175598, |
|
"learning_rate": 5.395780443493508e-06, |
|
"loss": 0.8549, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.592, |
|
"grad_norm": 0.5659922957420349, |
|
"learning_rate": 5.34929900712098e-06, |
|
"loss": 0.7686, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5568146705627441, |
|
"learning_rate": 5.302787215474992e-06, |
|
"loss": 0.8329, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.608, |
|
"grad_norm": 0.5732328295707703, |
|
"learning_rate": 5.256249110588659e-06, |
|
"loss": 0.833, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.616, |
|
"grad_norm": 0.5560725927352905, |
|
"learning_rate": 5.209688736781811e-06, |
|
"loss": 0.812, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.624, |
|
"grad_norm": 0.49767476320266724, |
|
"learning_rate": 5.163110140309518e-06, |
|
"loss": 0.8408, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.6320000000000001, |
|
"grad_norm": 0.5641460418701172, |
|
"learning_rate": 5.116517369010467e-06, |
|
"loss": 0.8294, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.6400000000000001, |
|
"grad_norm": 0.6225250363349915, |
|
"learning_rate": 5.069914471955179e-06, |
|
"loss": 0.7811, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.6480000000000001, |
|
"grad_norm": 0.5748852491378784, |
|
"learning_rate": 5.023305499094145e-06, |
|
"loss": 0.8307, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.6560000000000001, |
|
"grad_norm": 0.5865118503570557, |
|
"learning_rate": 4.976694500905858e-06, |
|
"loss": 0.8046, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.5250575542449951, |
|
"learning_rate": 4.930085528044823e-06, |
|
"loss": 0.8315, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.6720000000000002, |
|
"grad_norm": 0.5795397162437439, |
|
"learning_rate": 4.883482630989536e-06, |
|
"loss": 0.8472, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.6800000000000002, |
|
"grad_norm": 0.6399658918380737, |
|
"learning_rate": 4.8368898596904834e-06, |
|
"loss": 0.8163, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.688, |
|
"grad_norm": 0.5113149881362915, |
|
"learning_rate": 4.790311263218191e-06, |
|
"loss": 0.8362, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.696, |
|
"grad_norm": 0.4925471544265747, |
|
"learning_rate": 4.743750889411342e-06, |
|
"loss": 0.8364, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.704, |
|
"grad_norm": 0.5226654410362244, |
|
"learning_rate": 4.697212784525009e-06, |
|
"loss": 0.8156, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.712, |
|
"grad_norm": 0.49383535981178284, |
|
"learning_rate": 4.65070099287902e-06, |
|
"loss": 0.8363, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5424585342407227, |
|
"learning_rate": 4.604219556506492e-06, |
|
"loss": 0.8252, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.4512873589992523, |
|
"learning_rate": 4.557772514802564e-06, |
|
"loss": 0.8188, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.736, |
|
"grad_norm": 0.49828025698661804, |
|
"learning_rate": 4.511363904173366e-06, |
|
"loss": 0.835, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.744, |
|
"grad_norm": 0.5537604093551636, |
|
"learning_rate": 4.46499775768523e-06, |
|
"loss": 0.8072, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.752, |
|
"grad_norm": 0.509067714214325, |
|
"learning_rate": 4.418678104714214e-06, |
|
"loss": 0.8331, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.49603044986724854, |
|
"learning_rate": 4.372408970595931e-06, |
|
"loss": 0.8031, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.768, |
|
"grad_norm": 0.5341010689735413, |
|
"learning_rate": 4.326194376275729e-06, |
|
"loss": 0.8117, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.776, |
|
"grad_norm": 0.49610334634780884, |
|
"learning_rate": 4.280038337959268e-06, |
|
"loss": 0.8137, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.784, |
|
"grad_norm": 0.4939770996570587, |
|
"learning_rate": 4.2339448667634885e-06, |
|
"loss": 0.7834, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.5622074007987976, |
|
"learning_rate": 4.187917968368036e-06, |
|
"loss": 0.8227, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.6220787763595581, |
|
"learning_rate": 4.141961642667152e-06, |
|
"loss": 0.8526, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.808, |
|
"grad_norm": 0.5713546276092529, |
|
"learning_rate": 4.09607988342207e-06, |
|
"loss": 0.8529, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.8159999999999998, |
|
"grad_norm": 0.5482434034347534, |
|
"learning_rate": 4.0502766779139485e-06, |
|
"loss": 0.8142, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.8239999999999998, |
|
"grad_norm": 0.6168124079704285, |
|
"learning_rate": 4.0045560065973535e-06, |
|
"loss": 0.8127, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.8319999999999999, |
|
"grad_norm": 0.5725027918815613, |
|
"learning_rate": 3.958921842754351e-06, |
|
"loss": 0.8405, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.8399999999999999, |
|
"grad_norm": 0.5908339023590088, |
|
"learning_rate": 3.913378152149214e-06, |
|
"loss": 0.7938, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.8479999999999999, |
|
"grad_norm": 0.5032182931900024, |
|
"learning_rate": 3.86792889268378e-06, |
|
"loss": 0.8237, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.4820937514305115, |
|
"learning_rate": 3.8225780140535025e-06, |
|
"loss": 0.8157, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.8639999999999999, |
|
"grad_norm": 0.5697328448295593, |
|
"learning_rate": 3.777329457404202e-06, |
|
"loss": 0.8471, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.8719999999999999, |
|
"grad_norm": 0.5110496282577515, |
|
"learning_rate": 3.7321871549895715e-06, |
|
"loss": 0.8184, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.4492500424385071, |
|
"learning_rate": 3.68715502982945e-06, |
|
"loss": 0.8048, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.888, |
|
"grad_norm": 0.5033578872680664, |
|
"learning_rate": 3.6422369953688973e-06, |
|
"loss": 0.8462, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.896, |
|
"grad_norm": 0.47552290558815, |
|
"learning_rate": 3.5974369551381023e-06, |
|
"loss": 0.814, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.904, |
|
"grad_norm": 0.47769391536712646, |
|
"learning_rate": 3.5527588024131542e-06, |
|
"loss": 0.8076, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.912, |
|
"grad_norm": 0.47970548272132874, |
|
"learning_rate": 3.5082064198777e-06, |
|
"loss": 0.8063, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5200381875038147, |
|
"learning_rate": 3.463783679285535e-06, |
|
"loss": 0.8212, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.928, |
|
"grad_norm": 0.4885690212249756, |
|
"learning_rate": 3.4194944411241213e-06, |
|
"loss": 0.8087, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.936, |
|
"grad_norm": 0.523141086101532, |
|
"learning_rate": 3.3753425542791106e-06, |
|
"loss": 0.8116, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.944, |
|
"grad_norm": 0.5129029750823975, |
|
"learning_rate": 3.3313318556998523e-06, |
|
"loss": 0.8062, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.952, |
|
"grad_norm": 0.4830564260482788, |
|
"learning_rate": 3.2874661700659586e-06, |
|
"loss": 0.8428, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.4503372609615326, |
|
"learning_rate": 3.2437493094549223e-06, |
|
"loss": 0.8268, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.968, |
|
"grad_norm": 0.4484092891216278, |
|
"learning_rate": 3.200185073010831e-06, |
|
"loss": 0.8086, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.976, |
|
"grad_norm": 0.41616371273994446, |
|
"learning_rate": 3.1567772466142156e-06, |
|
"loss": 0.8212, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.4492393732070923, |
|
"learning_rate": 3.1135296025530426e-06, |
|
"loss": 0.8217, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.992, |
|
"grad_norm": 0.48867785930633545, |
|
"learning_rate": 3.070445899194885e-06, |
|
"loss": 0.8394, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.5146781206130981, |
|
"learning_rate": 3.0275298806603102e-06, |
|
"loss": 0.8254, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.008, |
|
"grad_norm": 0.4940868616104126, |
|
"learning_rate": 2.984785276497507e-06, |
|
"loss": 0.7948, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.016, |
|
"grad_norm": 0.5990253686904907, |
|
"learning_rate": 2.9422158013581658e-06, |
|
"loss": 0.7709, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.024, |
|
"grad_norm": 0.52504962682724, |
|
"learning_rate": 2.899825154674674e-06, |
|
"loss": 0.7659, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.032, |
|
"grad_norm": 0.47968220710754395, |
|
"learning_rate": 2.8576170203386144e-06, |
|
"loss": 0.7978, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.5218414068222046, |
|
"learning_rate": 2.8155950663806234e-06, |
|
"loss": 0.8075, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.6022427082061768, |
|
"learning_rate": 2.7737629446516325e-06, |
|
"loss": 0.7967, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.056, |
|
"grad_norm": 0.45537319779396057, |
|
"learning_rate": 2.732124290505501e-06, |
|
"loss": 0.7637, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.064, |
|
"grad_norm": 0.47411370277404785, |
|
"learning_rate": 2.6906827224831024e-06, |
|
"loss": 0.7999, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.072, |
|
"grad_norm": 0.47417914867401123, |
|
"learning_rate": 2.6494418419978485e-06, |
|
"loss": 0.8002, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.47205376625061035, |
|
"learning_rate": 2.608405233022724e-06, |
|
"loss": 0.7841, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.088, |
|
"grad_norm": 0.4820186197757721, |
|
"learning_rate": 2.5675764617788233e-06, |
|
"loss": 0.7808, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.096, |
|
"grad_norm": 0.44489675760269165, |
|
"learning_rate": 2.526959076425434e-06, |
|
"loss": 0.7758, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.104, |
|
"grad_norm": 0.4890801012516022, |
|
"learning_rate": 2.4865566067516896e-06, |
|
"loss": 0.8111, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.4414970278739929, |
|
"learning_rate": 2.4463725638698182e-06, |
|
"loss": 0.7976, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.45259734988212585, |
|
"learning_rate": 2.406410439910017e-06, |
|
"loss": 0.7903, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.128, |
|
"grad_norm": 0.48778876662254333, |
|
"learning_rate": 2.366673707716973e-06, |
|
"loss": 0.8107, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.136, |
|
"grad_norm": 0.44795307517051697, |
|
"learning_rate": 2.327165820548059e-06, |
|
"loss": 0.7231, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.144, |
|
"grad_norm": 0.43515029549598694, |
|
"learning_rate": 2.287890211773238e-06, |
|
"loss": 0.7503, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.152, |
|
"grad_norm": 0.4571765661239624, |
|
"learning_rate": 2.2488502945766893e-06, |
|
"loss": 0.8206, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.4228002727031708, |
|
"learning_rate": 2.210049461660189e-06, |
|
"loss": 0.7817, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.168, |
|
"grad_norm": 0.4443894624710083, |
|
"learning_rate": 2.1714910849482777e-06, |
|
"loss": 0.7855, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.4512254297733307, |
|
"learning_rate": 2.1331785152952243e-06, |
|
"loss": 0.8024, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.184, |
|
"grad_norm": 0.5297703742980957, |
|
"learning_rate": 2.0951150821938278e-06, |
|
"loss": 0.7472, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.192, |
|
"grad_norm": 0.46972817182540894, |
|
"learning_rate": 2.0573040934860717e-06, |
|
"loss": 0.7666, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.48370957374572754, |
|
"learning_rate": 2.0197488350756618e-06, |
|
"loss": 0.7913, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.208, |
|
"grad_norm": 0.43513721227645874, |
|
"learning_rate": 1.98245257064247e-06, |
|
"loss": 0.7855, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.216, |
|
"grad_norm": 0.44624122977256775, |
|
"learning_rate": 1.945418541358911e-06, |
|
"loss": 0.7948, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.224, |
|
"grad_norm": 0.5190390944480896, |
|
"learning_rate": 1.9086499656082685e-06, |
|
"loss": 0.7953, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.232, |
|
"grad_norm": 0.4471817910671234, |
|
"learning_rate": 1.872150038705015e-06, |
|
"loss": 0.7723, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.4081578254699707, |
|
"learning_rate": 1.835921932617119e-06, |
|
"loss": 0.7863, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.248, |
|
"grad_norm": 0.4299153983592987, |
|
"learning_rate": 1.7999687956903955e-06, |
|
"loss": 0.7459, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.2560000000000002, |
|
"grad_norm": 0.44286420941352844, |
|
"learning_rate": 1.7642937523749038e-06, |
|
"loss": 0.7549, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.2640000000000002, |
|
"grad_norm": 0.5079202651977539, |
|
"learning_rate": 1.7288999029534177e-06, |
|
"loss": 0.7945, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.2720000000000002, |
|
"grad_norm": 0.5463809967041016, |
|
"learning_rate": 1.6937903232720076e-06, |
|
"loss": 0.8004, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.2800000000000002, |
|
"grad_norm": 0.4327748715877533, |
|
"learning_rate": 1.6589680644727347e-06, |
|
"loss": 0.7723, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.288, |
|
"grad_norm": 0.4358852505683899, |
|
"learning_rate": 1.6244361527284953e-06, |
|
"loss": 0.7721, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.296, |
|
"grad_norm": 0.41679710149765015, |
|
"learning_rate": 1.5901975889800387e-06, |
|
"loss": 0.7891, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.4202753007411957, |
|
"learning_rate": 1.556255348675174e-06, |
|
"loss": 0.7585, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.312, |
|
"grad_norm": 0.4579218029975891, |
|
"learning_rate": 1.522612381510195e-06, |
|
"loss": 0.7672, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.47101694345474243, |
|
"learning_rate": 1.489271611173538e-06, |
|
"loss": 0.7861, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.328, |
|
"grad_norm": 0.4235668182373047, |
|
"learning_rate": 1.4562359350917054e-06, |
|
"loss": 0.791, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.336, |
|
"grad_norm": 0.44197484850883484, |
|
"learning_rate": 1.423508224177474e-06, |
|
"loss": 0.7861, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.344, |
|
"grad_norm": 0.43885377049446106, |
|
"learning_rate": 1.3910913225803946e-06, |
|
"loss": 0.7965, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.352, |
|
"grad_norm": 0.4231855273246765, |
|
"learning_rate": 1.35898804743963e-06, |
|
"loss": 0.7815, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.48017239570617676, |
|
"learning_rate": 1.3272011886391368e-06, |
|
"loss": 0.8028, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.46980807185173035, |
|
"learning_rate": 1.295733508565213e-06, |
|
"loss": 0.7692, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.376, |
|
"grad_norm": 0.45487645268440247, |
|
"learning_rate": 1.2645877418664394e-06, |
|
"loss": 0.7546, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.384, |
|
"grad_norm": 0.44050565361976624, |
|
"learning_rate": 1.2337665952160266e-06, |
|
"loss": 0.798, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.392, |
|
"grad_norm": 0.4430494010448456, |
|
"learning_rate": 1.2032727470765982e-06, |
|
"loss": 0.7849, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.4752819836139679, |
|
"learning_rate": 1.1731088474674235e-06, |
|
"loss": 0.8045, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.408, |
|
"grad_norm": 0.4466467499732971, |
|
"learning_rate": 1.1432775177341165e-06, |
|
"loss": 0.7841, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.416, |
|
"grad_norm": 0.4148922562599182, |
|
"learning_rate": 1.11378135032084e-06, |
|
"loss": 0.7993, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.424, |
|
"grad_norm": 0.44224417209625244, |
|
"learning_rate": 1.08462290854501e-06, |
|
"loss": 0.7655, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.42925554513931274, |
|
"learning_rate": 1.0558047263745297e-06, |
|
"loss": 0.7607, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.4702661633491516, |
|
"learning_rate": 1.0273293082075914e-06, |
|
"loss": 0.7425, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.448, |
|
"grad_norm": 0.44665732979774475, |
|
"learning_rate": 9.991991286550207e-07, |
|
"loss": 0.8086, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.456, |
|
"grad_norm": 0.44039228558540344, |
|
"learning_rate": 9.71416632325235e-07, |
|
"loss": 0.7751, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.464, |
|
"grad_norm": 0.4385325610637665, |
|
"learning_rate": 9.439842336117954e-07, |
|
"loss": 0.8141, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.472, |
|
"grad_norm": 0.43111181259155273, |
|
"learning_rate": 9.169043164835867e-07, |
|
"loss": 0.762, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.4057021737098694, |
|
"learning_rate": 8.901792342776439e-07, |
|
"loss": 0.8327, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.488, |
|
"grad_norm": 0.4096888303756714, |
|
"learning_rate": 8.638113094946382e-07, |
|
"loss": 0.8251, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.4527164101600647, |
|
"learning_rate": 8.378028335970451e-07, |
|
"loss": 0.7605, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.504, |
|
"grad_norm": 0.4072956144809723, |
|
"learning_rate": 8.121560668100065e-07, |
|
"loss": 0.7836, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.512, |
|
"grad_norm": 0.4347890615463257, |
|
"learning_rate": 7.868732379249122e-07, |
|
"loss": 0.7888, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.44405418634414673, |
|
"learning_rate": 7.619565441057075e-07, |
|
"loss": 0.7476, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.528, |
|
"grad_norm": 0.4290863573551178, |
|
"learning_rate": 7.37408150697953e-07, |
|
"loss": 0.776, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.536, |
|
"grad_norm": 0.4163723289966583, |
|
"learning_rate": 7.132301910406503e-07, |
|
"loss": 0.7841, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.544, |
|
"grad_norm": 0.39156779646873474, |
|
"learning_rate": 6.894247662808456e-07, |
|
"loss": 0.8141, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.552, |
|
"grad_norm": 0.3718986511230469, |
|
"learning_rate": 6.659939451910341e-07, |
|
"loss": 0.8007, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.39346760511398315, |
|
"learning_rate": 6.429397639893758e-07, |
|
"loss": 0.7884, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.568, |
|
"grad_norm": 0.38905784487724304, |
|
"learning_rate": 6.202642261627411e-07, |
|
"loss": 0.7697, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.576, |
|
"grad_norm": 0.4061625301837921, |
|
"learning_rate": 5.979693022926025e-07, |
|
"loss": 0.7715, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.584, |
|
"grad_norm": 0.4345610737800598, |
|
"learning_rate": 5.760569298837825e-07, |
|
"loss": 0.785, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.592, |
|
"grad_norm": 0.3941546678543091, |
|
"learning_rate": 5.54529013196079e-07, |
|
"loss": 0.8025, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.36712539196014404, |
|
"learning_rate": 5.333874230787772e-07, |
|
"loss": 0.8007, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.608, |
|
"grad_norm": 0.3812052607536316, |
|
"learning_rate": 5.126339968080696e-07, |
|
"loss": 0.7889, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.616, |
|
"grad_norm": 0.4085177481174469, |
|
"learning_rate": 4.922705379273862e-07, |
|
"loss": 0.7641, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.42239484190940857, |
|
"learning_rate": 4.7229881609066387e-07, |
|
"loss": 0.7979, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.632, |
|
"grad_norm": 0.4121035635471344, |
|
"learning_rate": 4.5272056690855494e-07, |
|
"loss": 0.7858, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.38635125756263733, |
|
"learning_rate": 4.335374917975982e-07, |
|
"loss": 0.8095, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.648, |
|
"grad_norm": 0.38656213879585266, |
|
"learning_rate": 4.147512578323615e-07, |
|
"loss": 0.7669, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.656, |
|
"grad_norm": 0.41909682750701904, |
|
"learning_rate": 3.9636349760056427e-07, |
|
"loss": 0.7788, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.664, |
|
"grad_norm": 0.4400654137134552, |
|
"learning_rate": 3.783758090611983e-07, |
|
"loss": 0.7382, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.672, |
|
"grad_norm": 0.3858467638492584, |
|
"learning_rate": 3.6078975540566716e-07, |
|
"loss": 0.767, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.36701512336730957, |
|
"learning_rate": 3.4360686492193263e-07, |
|
"loss": 0.7928, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.38759228587150574, |
|
"learning_rate": 3.268286308617041e-07, |
|
"loss": 0.8181, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.6959999999999997, |
|
"grad_norm": 0.38279959559440613, |
|
"learning_rate": 3.104565113106689e-07, |
|
"loss": 0.7943, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.7039999999999997, |
|
"grad_norm": 0.4579544961452484, |
|
"learning_rate": 2.9449192906178205e-07, |
|
"loss": 0.7605, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.7119999999999997, |
|
"grad_norm": 0.40908747911453247, |
|
"learning_rate": 2.789362714916172e-07, |
|
"loss": 0.8095, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.7199999999999998, |
|
"grad_norm": 0.37257906794548035, |
|
"learning_rate": 2.6379089043980064e-07, |
|
"loss": 0.7988, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.7279999999999998, |
|
"grad_norm": 0.431416779756546, |
|
"learning_rate": 2.4905710209153224e-07, |
|
"loss": 0.7819, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.7359999999999998, |
|
"grad_norm": 0.4172951579093933, |
|
"learning_rate": 2.3473618686320477e-07, |
|
"loss": 0.7597, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.7439999999999998, |
|
"grad_norm": 0.41569289565086365, |
|
"learning_rate": 2.208293892911284e-07, |
|
"loss": 0.7797, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.42253097891807556, |
|
"learning_rate": 2.0733791792338197e-07, |
|
"loss": 0.787, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.36844950914382935, |
|
"learning_rate": 1.9426294521477874e-07, |
|
"loss": 0.8194, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.768, |
|
"grad_norm": 0.4011365473270416, |
|
"learning_rate": 1.8160560742498223e-07, |
|
"loss": 0.7881, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.776, |
|
"grad_norm": 0.44703608751296997, |
|
"learning_rate": 1.6936700451975818e-07, |
|
"loss": 0.7567, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.784, |
|
"grad_norm": 0.39994892477989197, |
|
"learning_rate": 1.5754820007538473e-07, |
|
"loss": 0.7683, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.792, |
|
"grad_norm": 0.4645727872848511, |
|
"learning_rate": 1.461502211862237e-07, |
|
"loss": 0.7094, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.3852178156375885, |
|
"learning_rate": 1.3517405837546404e-07, |
|
"loss": 0.8263, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.808, |
|
"grad_norm": 0.4058782160282135, |
|
"learning_rate": 1.2462066550903818e-07, |
|
"loss": 0.7728, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.4285104274749756, |
|
"learning_rate": 1.1449095971273305e-07, |
|
"loss": 0.8048, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.824, |
|
"grad_norm": 0.39162999391555786, |
|
"learning_rate": 1.0478582129248516e-07, |
|
"loss": 0.8227, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.832, |
|
"grad_norm": 0.39359840750694275, |
|
"learning_rate": 9.550609365787888e-08, |
|
"loss": 0.7782, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.4036370813846588, |
|
"learning_rate": 8.66525832488535e-08, |
|
"loss": 0.7619, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.848, |
|
"grad_norm": 0.42931556701660156, |
|
"learning_rate": 7.822605946561923e-08, |
|
"loss": 0.7714, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.856, |
|
"grad_norm": 0.4127103090286255, |
|
"learning_rate": 7.022725460179459e-08, |
|
"loss": 0.7252, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.864, |
|
"grad_norm": 0.3982616364955902, |
|
"learning_rate": 6.265686378076729e-08, |
|
"loss": 0.8099, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.872, |
|
"grad_norm": 0.4003240168094635, |
|
"learning_rate": 5.5515544895284324e-08, |
|
"loss": 0.7717, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.3641519844532013, |
|
"learning_rate": 4.880391855028088e-08, |
|
"loss": 0.7815, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.888, |
|
"grad_norm": 0.3692556619644165, |
|
"learning_rate": 4.252256800894694e-08, |
|
"loss": 0.8232, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.896, |
|
"grad_norm": 0.3871900141239166, |
|
"learning_rate": 3.6672039142039426e-08, |
|
"loss": 0.7802, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.904, |
|
"grad_norm": 0.39646491408348083, |
|
"learning_rate": 3.125284038044407e-08, |
|
"loss": 0.7824, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.912, |
|
"grad_norm": 0.40282607078552246, |
|
"learning_rate": 2.6265442670991293e-08, |
|
"loss": 0.7715, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.4330500662326813, |
|
"learning_rate": 2.1710279435530058e-08, |
|
"loss": 0.7895, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.928, |
|
"grad_norm": 0.3595670461654663, |
|
"learning_rate": 1.7587746533260786e-08, |
|
"loss": 0.8252, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.936, |
|
"grad_norm": 0.41389673948287964, |
|
"learning_rate": 1.3898202226333424e-08, |
|
"loss": 0.7914, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.42384371161460876, |
|
"learning_rate": 1.0641967148716236e-08, |
|
"loss": 0.775, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.952, |
|
"grad_norm": 0.4105876088142395, |
|
"learning_rate": 7.819324278328099e-09, |
|
"loss": 0.8268, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.4283929467201233, |
|
"learning_rate": 5.430518912448169e-09, |
|
"loss": 0.785, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.968, |
|
"grad_norm": 0.4095473289489746, |
|
"learning_rate": 3.4757586464001513e-09, |
|
"loss": 0.7683, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.976, |
|
"grad_norm": 0.38301780819892883, |
|
"learning_rate": 1.9552133555084117e-09, |
|
"loss": 0.7837, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.984, |
|
"grad_norm": 0.4291246831417084, |
|
"learning_rate": 8.690151803386615e-10, |
|
"loss": 0.7784, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.992, |
|
"grad_norm": 0.4616396725177765, |
|
"learning_rate": 2.1725851521103847e-10, |
|
"loss": 0.753, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.4092578887939453, |
|
"learning_rate": 0.0, |
|
"loss": 0.7939, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 375, |
|
"total_flos": 372577161248768.0, |
|
"train_loss": 0.8553513642946879, |
|
"train_runtime": 20932.817, |
|
"train_samples_per_second": 1.72, |
|
"train_steps_per_second": 0.018 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 375, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 372577161248768.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |