{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.988, |
|
"eval_steps": 500, |
|
"global_step": 249, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.012, |
|
"grad_norm": 6.98012638092041, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.2874, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.024, |
|
"grad_norm": 7.2280497550964355, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.3379, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.036, |
|
"grad_norm": 6.49622106552124, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.2253, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.048, |
|
"grad_norm": 6.694257736206055, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.2606, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.282245635986328, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.2299, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.072, |
|
"grad_norm": 4.9895453453063965, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.1999, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.084, |
|
"grad_norm": 4.745405673980713, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.2123, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.096, |
|
"grad_norm": 2.9413177967071533, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.2145, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.108, |
|
"grad_norm": 2.6902289390563965, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.1699, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.5415780544281006, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.1446, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.132, |
|
"grad_norm": 4.769739627838135, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.1619, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.144, |
|
"grad_norm": 4.757033824920654, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.1448, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.156, |
|
"grad_norm": 4.72186803817749, |
|
"learning_rate": 5.2e-06, |
|
"loss": 1.1301, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.168, |
|
"grad_norm": 3.7590582370758057, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.1112, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.620077133178711, |
|
"learning_rate": 6e-06, |
|
"loss": 1.1117, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 3.142825126647949, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 1.0957, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.204, |
|
"grad_norm": 2.2360799312591553, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 1.0306, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.216, |
|
"grad_norm": 1.9428048133850098, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 1.069, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.228, |
|
"grad_norm": 1.7547845840454102, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 1.0052, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.657777190208435, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.0323, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.252, |
|
"grad_norm": 1.529606819152832, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 1.0235, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.264, |
|
"grad_norm": 1.401602029800415, |
|
"learning_rate": 8.8e-06, |
|
"loss": 1.0052, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.276, |
|
"grad_norm": 1.3033355474472046, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.9629, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.288, |
|
"grad_norm": 1.35280442237854, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.9827, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1778582334518433, |
|
"learning_rate": 1e-05, |
|
"loss": 0.9396, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.312, |
|
"grad_norm": 1.0478476285934448, |
|
"learning_rate": 9.999508258797876e-06, |
|
"loss": 0.9898, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.324, |
|
"grad_norm": 1.2109336853027344, |
|
"learning_rate": 9.998033131915266e-06, |
|
"loss": 0.9925, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.336, |
|
"grad_norm": 1.2328909635543823, |
|
"learning_rate": 9.995574909504434e-06, |
|
"loss": 0.9736, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.348, |
|
"grad_norm": 1.0876905918121338, |
|
"learning_rate": 9.992134075089085e-06, |
|
"loss": 0.946, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9405947327613831, |
|
"learning_rate": 9.987711305469232e-06, |
|
"loss": 0.9901, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.372, |
|
"grad_norm": 0.8621402382850647, |
|
"learning_rate": 9.982307470588097e-06, |
|
"loss": 0.9884, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.858891487121582, |
|
"learning_rate": 9.975923633360985e-06, |
|
"loss": 0.9645, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.396, |
|
"grad_norm": 0.9253868460655212, |
|
"learning_rate": 9.968561049466214e-06, |
|
"loss": 0.9543, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.408, |
|
"grad_norm": 0.8706124424934387, |
|
"learning_rate": 9.960221167098124e-06, |
|
"loss": 0.9672, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.731913149356842, |
|
"learning_rate": 9.950905626682229e-06, |
|
"loss": 0.9399, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.432, |
|
"grad_norm": 0.7874249815940857, |
|
"learning_rate": 9.940616260552545e-06, |
|
"loss": 0.9548, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.444, |
|
"grad_norm": 0.8237317800521851, |
|
"learning_rate": 9.92935509259118e-06, |
|
"loss": 0.959, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.456, |
|
"grad_norm": 0.812666118144989, |
|
"learning_rate": 9.917124337830242e-06, |
|
"loss": 0.9314, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.468, |
|
"grad_norm": 0.681806743144989, |
|
"learning_rate": 9.903926402016153e-06, |
|
"loss": 0.9564, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.7917366623878479, |
|
"learning_rate": 9.889763881136439e-06, |
|
"loss": 0.958, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.492, |
|
"grad_norm": 0.8405086398124695, |
|
"learning_rate": 9.874639560909118e-06, |
|
"loss": 0.9135, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.504, |
|
"grad_norm": 0.7085656523704529, |
|
"learning_rate": 9.858556416234755e-06, |
|
"loss": 0.9224, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.516, |
|
"grad_norm": 0.6934413909912109, |
|
"learning_rate": 9.841517610611309e-06, |
|
"loss": 0.9147, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.528, |
|
"grad_norm": 0.8959766626358032, |
|
"learning_rate": 9.82352649551188e-06, |
|
"loss": 0.9471, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.643950879573822, |
|
"learning_rate": 9.804586609725499e-06, |
|
"loss": 0.9136, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.552, |
|
"grad_norm": 0.6084837913513184, |
|
"learning_rate": 9.784701678661045e-06, |
|
"loss": 0.9142, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.564, |
|
"grad_norm": 0.7732555866241455, |
|
"learning_rate": 9.763875613614482e-06, |
|
"loss": 0.887, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 0.617849588394165, |
|
"learning_rate": 9.742112510999516e-06, |
|
"loss": 0.9138, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.588, |
|
"grad_norm": 0.8307777643203735, |
|
"learning_rate": 9.719416651541839e-06, |
|
"loss": 0.9335, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.711719810962677, |
|
"learning_rate": 9.69579249943714e-06, |
|
"loss": 0.9457, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.612, |
|
"grad_norm": 0.6059475541114807, |
|
"learning_rate": 9.671244701472999e-06, |
|
"loss": 0.9146, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.624, |
|
"grad_norm": 0.6742075085639954, |
|
"learning_rate": 9.645778086114892e-06, |
|
"loss": 0.9178, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.636, |
|
"grad_norm": 0.6479842662811279, |
|
"learning_rate": 9.619397662556434e-06, |
|
"loss": 0.908, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.648, |
|
"grad_norm": 0.6552858948707581, |
|
"learning_rate": 9.592108619734107e-06, |
|
"loss": 0.953, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.637143075466156, |
|
"learning_rate": 9.563916325306595e-06, |
|
"loss": 0.8592, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.672, |
|
"grad_norm": 0.649308979511261, |
|
"learning_rate": 9.534826324599002e-06, |
|
"loss": 0.9176, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.684, |
|
"grad_norm": 0.9127522706985474, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 0.9613, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.696, |
|
"grad_norm": 0.6097024083137512, |
|
"learning_rate": 9.473976267396831e-06, |
|
"loss": 0.9182, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.708, |
|
"grad_norm": 0.7733173370361328, |
|
"learning_rate": 9.442228179894362e-06, |
|
"loss": 0.9183, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6656049489974976, |
|
"learning_rate": 9.409606321741776e-06, |
|
"loss": 0.9708, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.732, |
|
"grad_norm": 0.6555663347244263, |
|
"learning_rate": 9.376117109543769e-06, |
|
"loss": 0.9074, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.744, |
|
"grad_norm": 0.6316003203392029, |
|
"learning_rate": 9.341767130510529e-06, |
|
"loss": 0.8991, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.756, |
|
"grad_norm": 0.6999967694282532, |
|
"learning_rate": 9.306563141162046e-06, |
|
"loss": 0.8979, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 0.6841825842857361, |
|
"learning_rate": 9.270512065999139e-06, |
|
"loss": 0.8945, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.6653499603271484, |
|
"learning_rate": 9.233620996141421e-06, |
|
"loss": 0.9179, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.792, |
|
"grad_norm": 0.6969425082206726, |
|
"learning_rate": 9.195897187932513e-06, |
|
"loss": 0.8887, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.804, |
|
"grad_norm": 0.6159932017326355, |
|
"learning_rate": 9.157348061512728e-06, |
|
"loss": 0.8882, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.816, |
|
"grad_norm": 0.6578161120414734, |
|
"learning_rate": 9.117981199359575e-06, |
|
"loss": 0.9022, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.828, |
|
"grad_norm": 0.5723689794540405, |
|
"learning_rate": 9.077804344796302e-06, |
|
"loss": 0.8962, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.670845627784729, |
|
"learning_rate": 9.036825400468814e-06, |
|
"loss": 0.8789, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.852, |
|
"grad_norm": 0.6901213526725769, |
|
"learning_rate": 8.995052426791247e-06, |
|
"loss": 0.8723, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.864, |
|
"grad_norm": 0.5967451333999634, |
|
"learning_rate": 8.952493640360518e-06, |
|
"loss": 0.9267, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.876, |
|
"grad_norm": 0.6454184651374817, |
|
"learning_rate": 8.90915741234015e-06, |
|
"loss": 0.8762, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.888, |
|
"grad_norm": 0.6845690011978149, |
|
"learning_rate": 8.865052266813686e-06, |
|
"loss": 0.9348, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.616240918636322, |
|
"learning_rate": 8.820186879108038e-06, |
|
"loss": 0.9024, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.912, |
|
"grad_norm": 0.660294234752655, |
|
"learning_rate": 8.77457007408708e-06, |
|
"loss": 0.8927, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.924, |
|
"grad_norm": 0.6262758374214172, |
|
"learning_rate": 8.728210824415829e-06, |
|
"loss": 0.9414, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.936, |
|
"grad_norm": 0.6183608174324036, |
|
"learning_rate": 8.681118248795548e-06, |
|
"loss": 0.9135, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.948, |
|
"grad_norm": 0.6649749875068665, |
|
"learning_rate": 8.633301610170136e-06, |
|
"loss": 0.9005, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.6269786357879639, |
|
"learning_rate": 8.584770313904138e-06, |
|
"loss": 0.8882, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.972, |
|
"grad_norm": 0.5906413197517395, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.8829, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.984, |
|
"grad_norm": 0.581932544708252, |
|
"learning_rate": 8.485602070884118e-06, |
|
"loss": 0.9358, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.996, |
|
"grad_norm": 0.689324676990509, |
|
"learning_rate": 8.43498463017451e-06, |
|
"loss": 0.9051, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.008, |
|
"grad_norm": 1.0778684616088867, |
|
"learning_rate": 8.383691540076372e-06, |
|
"loss": 1.5002, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.7571648359298706, |
|
"learning_rate": 8.331732889760021e-06, |
|
"loss": 0.826, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.032, |
|
"grad_norm": 0.8757691979408264, |
|
"learning_rate": 8.279118899309121e-06, |
|
"loss": 0.9041, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.044, |
|
"grad_norm": 0.5550661683082581, |
|
"learning_rate": 8.22585991771044e-06, |
|
"loss": 0.8642, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.056, |
|
"grad_norm": 0.713104784488678, |
|
"learning_rate": 8.171966420818227e-06, |
|
"loss": 0.8225, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.068, |
|
"grad_norm": 0.6943645477294922, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 0.9152, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.5287421941757202, |
|
"learning_rate": 8.062318406519751e-06, |
|
"loss": 0.7907, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.092, |
|
"grad_norm": 0.6482039093971252, |
|
"learning_rate": 8.00658545649203e-06, |
|
"loss": 0.9164, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.104, |
|
"grad_norm": 0.6448190808296204, |
|
"learning_rate": 7.950261121685642e-06, |
|
"loss": 0.8928, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.116, |
|
"grad_norm": 0.5866568684577942, |
|
"learning_rate": 7.89335648089903e-06, |
|
"loss": 0.8455, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.1280000000000001, |
|
"grad_norm": 0.5920262932777405, |
|
"learning_rate": 7.835882727074779e-06, |
|
"loss": 0.7902, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.1400000000000001, |
|
"grad_norm": 0.6534630656242371, |
|
"learning_rate": 7.777851165098012e-06, |
|
"loss": 0.917, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.6771339774131775, |
|
"learning_rate": 7.719273209572745e-06, |
|
"loss": 0.94, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.164, |
|
"grad_norm": 0.6356242299079895, |
|
"learning_rate": 7.660160382576683e-06, |
|
"loss": 0.7936, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.176, |
|
"grad_norm": 0.5693090558052063, |
|
"learning_rate": 7.600524311394873e-06, |
|
"loss": 0.8666, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.188, |
|
"grad_norm": 0.7795962691307068, |
|
"learning_rate": 7.540376726232648e-06, |
|
"loss": 0.8397, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.6267092227935791, |
|
"learning_rate": 7.4797294579083405e-06, |
|
"loss": 0.8552, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.212, |
|
"grad_norm": 0.657116174697876, |
|
"learning_rate": 7.4185944355261996e-06, |
|
"loss": 0.8907, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.224, |
|
"grad_norm": 0.5222663879394531, |
|
"learning_rate": 7.3569836841299905e-06, |
|
"loss": 0.8099, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.236, |
|
"grad_norm": 0.5663891434669495, |
|
"learning_rate": 7.294909322337689e-06, |
|
"loss": 0.7832, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.248, |
|
"grad_norm": 0.5922184586524963, |
|
"learning_rate": 7.232383559957815e-06, |
|
"loss": 0.9434, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.5749194025993347, |
|
"learning_rate": 7.169418695587791e-06, |
|
"loss": 0.8623, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.272, |
|
"grad_norm": 0.6411036252975464, |
|
"learning_rate": 7.106027114194856e-06, |
|
"loss": 0.8438, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.284, |
|
"grad_norm": 0.6471683979034424, |
|
"learning_rate": 7.042221284679982e-06, |
|
"loss": 0.8188, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.296, |
|
"grad_norm": 0.6250800490379333, |
|
"learning_rate": 6.978013757425295e-06, |
|
"loss": 0.9154, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.308, |
|
"grad_norm": 0.6909330487251282, |
|
"learning_rate": 6.913417161825449e-06, |
|
"loss": 0.8314, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5431961417198181, |
|
"learning_rate": 6.848444203803476e-06, |
|
"loss": 0.823, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.332, |
|
"grad_norm": 0.6324174404144287, |
|
"learning_rate": 6.783107663311566e-06, |
|
"loss": 0.8475, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.5820900201797485, |
|
"learning_rate": 6.717420391817306e-06, |
|
"loss": 0.8092, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.3559999999999999, |
|
"grad_norm": 0.5760152339935303, |
|
"learning_rate": 6.651395309775837e-06, |
|
"loss": 0.8584, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.3679999999999999, |
|
"grad_norm": 0.6144810914993286, |
|
"learning_rate": 6.585045404088442e-06, |
|
"loss": 0.7915, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.5397148728370667, |
|
"learning_rate": 6.518383725548074e-06, |
|
"loss": 0.9026, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.392, |
|
"grad_norm": 0.5049560070037842, |
|
"learning_rate": 6.451423386272312e-06, |
|
"loss": 0.84, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.404, |
|
"grad_norm": 0.51853346824646, |
|
"learning_rate": 6.384177557124247e-06, |
|
"loss": 0.7698, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.416, |
|
"grad_norm": 0.5880471467971802, |
|
"learning_rate": 6.3166594651218235e-06, |
|
"loss": 0.9143, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.428, |
|
"grad_norm": 0.612112283706665, |
|
"learning_rate": 6.248882390836135e-06, |
|
"loss": 0.8552, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.46327412128448486, |
|
"learning_rate": 6.180859665779173e-06, |
|
"loss": 0.808, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.452, |
|
"grad_norm": 0.6470435857772827, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 0.9172, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.464, |
|
"grad_norm": 0.526380181312561, |
|
"learning_rate": 6.04413082836085e-06, |
|
"loss": 0.8035, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.476, |
|
"grad_norm": 0.508842945098877, |
|
"learning_rate": 5.975451610080643e-06, |
|
"loss": 0.7913, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.488, |
|
"grad_norm": 0.5358572602272034, |
|
"learning_rate": 5.906580523901493e-06, |
|
"loss": 0.8627, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.573665201663971, |
|
"learning_rate": 5.837531116523683e-06, |
|
"loss": 0.8117, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.512, |
|
"grad_norm": 0.55750972032547, |
|
"learning_rate": 5.768316969722651e-06, |
|
"loss": 0.7876, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.524, |
|
"grad_norm": 0.4913860261440277, |
|
"learning_rate": 5.698951697677498e-06, |
|
"loss": 0.7882, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.682537853717804, |
|
"learning_rate": 5.629448944293128e-06, |
|
"loss": 0.8077, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.548, |
|
"grad_norm": 0.6094300150871277, |
|
"learning_rate": 5.559822380516539e-06, |
|
"loss": 0.9296, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.47651195526123047, |
|
"learning_rate": 5.490085701647805e-06, |
|
"loss": 0.8909, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.572, |
|
"grad_norm": 0.5251836180686951, |
|
"learning_rate": 5.420252624646238e-06, |
|
"loss": 0.8243, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.584, |
|
"grad_norm": 0.6414013504981995, |
|
"learning_rate": 5.350336885432337e-06, |
|
"loss": 0.9343, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.596, |
|
"grad_norm": 0.5134192109107971, |
|
"learning_rate": 5.2803522361859596e-06, |
|
"loss": 0.8446, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.608, |
|
"grad_norm": 0.4695882499217987, |
|
"learning_rate": 5.210312442641327e-06, |
|
"loss": 0.825, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5449735522270203, |
|
"learning_rate": 5.140231281379345e-06, |
|
"loss": 0.906, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.6320000000000001, |
|
"grad_norm": 0.4647942781448364, |
|
"learning_rate": 5.070122537117812e-06, |
|
"loss": 0.7803, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.6440000000000001, |
|
"grad_norm": 0.49307671189308167, |
|
"learning_rate": 5e-06, |
|
"loss": 0.8701, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.6560000000000001, |
|
"grad_norm": 0.45072510838508606, |
|
"learning_rate": 4.92987746288219e-06, |
|
"loss": 0.7693, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.6680000000000001, |
|
"grad_norm": 0.6335264444351196, |
|
"learning_rate": 4.859768718620656e-06, |
|
"loss": 0.9239, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.6800000000000002, |
|
"grad_norm": 0.5734366178512573, |
|
"learning_rate": 4.789687557358676e-06, |
|
"loss": 0.8102, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.692, |
|
"grad_norm": 0.5393184423446655, |
|
"learning_rate": 4.719647763814041e-06, |
|
"loss": 0.8653, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.704, |
|
"grad_norm": 0.6001816391944885, |
|
"learning_rate": 4.649663114567663e-06, |
|
"loss": 0.9439, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.716, |
|
"grad_norm": 0.5139857530593872, |
|
"learning_rate": 4.579747375353763e-06, |
|
"loss": 0.8445, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.4925212264060974, |
|
"learning_rate": 4.509914298352197e-06, |
|
"loss": 0.7356, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.5880061388015747, |
|
"learning_rate": 4.4401776194834615e-06, |
|
"loss": 0.9677, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.752, |
|
"grad_norm": 0.6019755601882935, |
|
"learning_rate": 4.3705510557068746e-06, |
|
"loss": 0.8576, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.764, |
|
"grad_norm": 0.47299277782440186, |
|
"learning_rate": 4.3010483023225045e-06, |
|
"loss": 0.8079, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.776, |
|
"grad_norm": 0.48285701870918274, |
|
"learning_rate": 4.231683030277349e-06, |
|
"loss": 0.8219, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.788, |
|
"grad_norm": 0.5679399967193604, |
|
"learning_rate": 4.162468883476319e-06, |
|
"loss": 0.8778, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5238530039787292, |
|
"learning_rate": 4.0934194760985095e-06, |
|
"loss": 0.8278, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.812, |
|
"grad_norm": 0.5001329779624939, |
|
"learning_rate": 4.02454838991936e-06, |
|
"loss": 0.8468, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.8239999999999998, |
|
"grad_norm": 0.4934837520122528, |
|
"learning_rate": 3.955869171639151e-06, |
|
"loss": 0.8676, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.8359999999999999, |
|
"grad_norm": 0.4868137836456299, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 0.6974, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.8479999999999999, |
|
"grad_norm": 0.5343292951583862, |
|
"learning_rate": 3.81914033422083e-06, |
|
"loss": 0.9395, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.8599999999999999, |
|
"grad_norm": 0.4892309308052063, |
|
"learning_rate": 3.751117609163865e-06, |
|
"loss": 0.7526, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.8719999999999999, |
|
"grad_norm": 0.5625379085540771, |
|
"learning_rate": 3.683340534878176e-06, |
|
"loss": 0.8781, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.884, |
|
"grad_norm": 0.5415952801704407, |
|
"learning_rate": 3.6158224428757538e-06, |
|
"loss": 0.8064, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.896, |
|
"grad_norm": 0.5701708197593689, |
|
"learning_rate": 3.5485766137276894e-06, |
|
"loss": 0.8821, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.908, |
|
"grad_norm": 0.5524731874465942, |
|
"learning_rate": 3.4816162744519266e-06, |
|
"loss": 0.8881, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.48896679282188416, |
|
"learning_rate": 3.4149545959115604e-06, |
|
"loss": 0.8135, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.932, |
|
"grad_norm": 0.4823067784309387, |
|
"learning_rate": 3.3486046902241663e-06, |
|
"loss": 0.7966, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.944, |
|
"grad_norm": 0.5454317927360535, |
|
"learning_rate": 3.2825796081826943e-06, |
|
"loss": 0.8471, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.956, |
|
"grad_norm": 0.5044663548469543, |
|
"learning_rate": 3.216892336688435e-06, |
|
"loss": 0.8319, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.968, |
|
"grad_norm": 0.500585675239563, |
|
"learning_rate": 3.1515557961965254e-06, |
|
"loss": 0.9108, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.4640965163707733, |
|
"learning_rate": 3.0865828381745515e-06, |
|
"loss": 0.8201, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.992, |
|
"grad_norm": 0.5118164420127869, |
|
"learning_rate": 3.021986242574707e-06, |
|
"loss": 0.8368, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.004, |
|
"grad_norm": 1.088891625404358, |
|
"learning_rate": 2.95777871532002e-06, |
|
"loss": 1.361, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.016, |
|
"grad_norm": 0.47330203652381897, |
|
"learning_rate": 2.893972885805148e-06, |
|
"loss": 0.7807, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.028, |
|
"grad_norm": 0.5352301597595215, |
|
"learning_rate": 2.83058130441221e-06, |
|
"loss": 0.7914, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.580504298210144, |
|
"learning_rate": 2.7676164400421864e-06, |
|
"loss": 0.8962, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.052, |
|
"grad_norm": 0.5661837458610535, |
|
"learning_rate": 2.705090677662311e-06, |
|
"loss": 0.7885, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.064, |
|
"grad_norm": 0.45867210626602173, |
|
"learning_rate": 2.6430163158700116e-06, |
|
"loss": 0.6953, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.076, |
|
"grad_norm": 0.5421217679977417, |
|
"learning_rate": 2.5814055644738013e-06, |
|
"loss": 0.8564, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.088, |
|
"grad_norm": 0.5122601389884949, |
|
"learning_rate": 2.520270542091663e-06, |
|
"loss": 0.7943, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.5720393657684326, |
|
"learning_rate": 2.4596232737673544e-06, |
|
"loss": 0.7948, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.5577499270439148, |
|
"learning_rate": 2.3994756886051267e-06, |
|
"loss": 0.8502, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.124, |
|
"grad_norm": 0.5329925417900085, |
|
"learning_rate": 2.339839617423318e-06, |
|
"loss": 0.7862, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.136, |
|
"grad_norm": 0.5803031921386719, |
|
"learning_rate": 2.280726790427258e-06, |
|
"loss": 0.8887, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.148, |
|
"grad_norm": 0.45233362913131714, |
|
"learning_rate": 2.2221488349019903e-06, |
|
"loss": 0.813, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.4876217842102051, |
|
"learning_rate": 2.1641172729252206e-06, |
|
"loss": 0.7921, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.172, |
|
"grad_norm": 0.5214042663574219, |
|
"learning_rate": 2.1066435191009717e-06, |
|
"loss": 0.8221, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.184, |
|
"grad_norm": 0.5316967368125916, |
|
"learning_rate": 2.04973887831436e-06, |
|
"loss": 0.7736, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.196, |
|
"grad_norm": 0.4661819338798523, |
|
"learning_rate": 1.9934145435079705e-06, |
|
"loss": 0.8011, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.208, |
|
"grad_norm": 0.46781396865844727, |
|
"learning_rate": 1.9376815934802496e-06, |
|
"loss": 0.8939, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.47688379883766174, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.7347, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.232, |
|
"grad_norm": 0.4810101389884949, |
|
"learning_rate": 1.8280335791817733e-06, |
|
"loss": 0.8875, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.2439999999999998, |
|
"grad_norm": 0.4670160114765167, |
|
"learning_rate": 1.7741400822895633e-06, |
|
"loss": 0.7385, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.2560000000000002, |
|
"grad_norm": 0.4591081738471985, |
|
"learning_rate": 1.7208811006908798e-06, |
|
"loss": 0.8053, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.268, |
|
"grad_norm": 0.44285574555397034, |
|
"learning_rate": 1.6682671102399806e-06, |
|
"loss": 0.8803, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.2800000000000002, |
|
"grad_norm": 0.4147845208644867, |
|
"learning_rate": 1.6163084599236278e-06, |
|
"loss": 0.7583, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.292, |
|
"grad_norm": 0.4563939869403839, |
|
"learning_rate": 1.5650153698254916e-06, |
|
"loss": 0.8772, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.46312814950942993, |
|
"learning_rate": 1.514397929115884e-06, |
|
"loss": 0.72, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.316, |
|
"grad_norm": 0.5045288801193237, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.8059, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.328, |
|
"grad_norm": 0.4324779510498047, |
|
"learning_rate": 1.4152296860958641e-06, |
|
"loss": 0.7998, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.4602769613265991, |
|
"learning_rate": 1.3666983898298659e-06, |
|
"loss": 0.7974, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.352, |
|
"grad_norm": 0.40926551818847656, |
|
"learning_rate": 1.3188817512044544e-06, |
|
"loss": 0.7687, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.364, |
|
"grad_norm": 0.4703604280948639, |
|
"learning_rate": 1.2717891755841722e-06, |
|
"loss": 0.9035, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.376, |
|
"grad_norm": 0.4227948784828186, |
|
"learning_rate": 1.225429925912921e-06, |
|
"loss": 0.7542, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.388, |
|
"grad_norm": 0.4166393280029297, |
|
"learning_rate": 1.1798131208919628e-06, |
|
"loss": 0.8088, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.38077840209007263, |
|
"learning_rate": 1.134947733186315e-06, |
|
"loss": 0.6584, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.412, |
|
"grad_norm": 0.42249420285224915, |
|
"learning_rate": 1.0908425876598512e-06, |
|
"loss": 0.8569, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.424, |
|
"grad_norm": 0.49399280548095703, |
|
"learning_rate": 1.047506359639483e-06, |
|
"loss": 0.8836, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.436, |
|
"grad_norm": 0.4305580258369446, |
|
"learning_rate": 1.004947573208756e-06, |
|
"loss": 0.7372, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.448, |
|
"grad_norm": 0.49399375915527344, |
|
"learning_rate": 9.631745995311881e-07, |
|
"loss": 0.7824, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.3984535336494446, |
|
"learning_rate": 9.221956552036992e-07, |
|
"loss": 0.7465, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.472, |
|
"grad_norm": 0.36112886667251587, |
|
"learning_rate": 8.820188006404268e-07, |
|
"loss": 0.694, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.484, |
|
"grad_norm": 0.45247265696525574, |
|
"learning_rate": 8.426519384872733e-07, |
|
"loss": 0.8708, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.38195085525512695, |
|
"learning_rate": 8.041028120674894e-07, |
|
"loss": 0.7514, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.508, |
|
"grad_norm": 0.4380353093147278, |
|
"learning_rate": 7.663790038585794e-07, |
|
"loss": 0.8477, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.42108631134033203, |
|
"learning_rate": 7.294879340008632e-07, |
|
"loss": 0.8247, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.532, |
|
"grad_norm": 0.4263460636138916, |
|
"learning_rate": 6.934368588379553e-07, |
|
"loss": 0.8271, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.544, |
|
"grad_norm": 0.48352324962615967, |
|
"learning_rate": 6.582328694894729e-07, |
|
"loss": 0.8894, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.556, |
|
"grad_norm": 0.37158602476119995, |
|
"learning_rate": 6.238828904562316e-07, |
|
"loss": 0.7195, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.568, |
|
"grad_norm": 0.3880118131637573, |
|
"learning_rate": 5.903936782582253e-07, |
|
"loss": 0.7907, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.40234553813934326, |
|
"learning_rate": 5.577718201056392e-07, |
|
"loss": 0.8158, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.592, |
|
"grad_norm": 0.42842331528663635, |
|
"learning_rate": 5.260237326031698e-07, |
|
"loss": 0.7774, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.604, |
|
"grad_norm": 0.4132654070854187, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.7719, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.616, |
|
"grad_norm": 0.4403928518295288, |
|
"learning_rate": 4.651736754009972e-07, |
|
"loss": 0.7448, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.628, |
|
"grad_norm": 0.4745451509952545, |
|
"learning_rate": 4.3608367469340553e-07, |
|
"loss": 0.9587, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.3818591833114624, |
|
"learning_rate": 4.078913802658946e-07, |
|
"loss": 0.7279, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.652, |
|
"grad_norm": 0.4163016974925995, |
|
"learning_rate": 3.8060233744356634e-07, |
|
"loss": 0.7954, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.664, |
|
"grad_norm": 0.37762609124183655, |
|
"learning_rate": 3.542219138851094e-07, |
|
"loss": 0.8308, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.676, |
|
"grad_norm": 0.4249165654182434, |
|
"learning_rate": 3.287552985270015e-07, |
|
"loss": 0.7808, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.43331316113471985, |
|
"learning_rate": 3.0420750056286195e-07, |
|
"loss": 0.8398, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.38995715975761414, |
|
"learning_rate": 2.8058334845816214e-07, |
|
"loss": 0.707, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.7119999999999997, |
|
"grad_norm": 0.43745648860931396, |
|
"learning_rate": 2.5788748900048676e-07, |
|
"loss": 0.8001, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.724, |
|
"grad_norm": 0.3968169093132019, |
|
"learning_rate": 2.3612438638551837e-07, |
|
"loss": 0.8116, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.7359999999999998, |
|
"grad_norm": 0.43742409348487854, |
|
"learning_rate": 2.152983213389559e-07, |
|
"loss": 0.776, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.748, |
|
"grad_norm": 0.39760884642601013, |
|
"learning_rate": 1.9541339027450256e-07, |
|
"loss": 0.8247, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.40744730830192566, |
|
"learning_rate": 1.7647350448812105e-07, |
|
"loss": 0.8348, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.7720000000000002, |
|
"grad_norm": 0.36414623260498047, |
|
"learning_rate": 1.5848238938869332e-07, |
|
"loss": 0.7256, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.784, |
|
"grad_norm": 0.42025619745254517, |
|
"learning_rate": 1.4144358376524504e-07, |
|
"loss": 0.8586, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.7960000000000003, |
|
"grad_norm": 0.4019131362438202, |
|
"learning_rate": 1.253604390908819e-07, |
|
"loss": 0.7834, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.808, |
|
"grad_norm": 0.43940842151641846, |
|
"learning_rate": 1.10236118863562e-07, |
|
"loss": 0.8294, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.4242531657218933, |
|
"learning_rate": 9.607359798384785e-08, |
|
"loss": 0.7621, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.832, |
|
"grad_norm": 0.4360555112361908, |
|
"learning_rate": 8.287566216975795e-08, |
|
"loss": 0.8652, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.844, |
|
"grad_norm": 0.38368791341781616, |
|
"learning_rate": 7.064490740882057e-08, |
|
"loss": 0.7203, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.856, |
|
"grad_norm": 0.4965880215167999, |
|
"learning_rate": 5.938373944745612e-08, |
|
"loss": 0.8733, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.868, |
|
"grad_norm": 0.43186068534851074, |
|
"learning_rate": 4.909437331777178e-08, |
|
"loss": 0.7493, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.3746281862258911, |
|
"learning_rate": 3.977883290187667e-08, |
|
"loss": 0.7482, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.892, |
|
"grad_norm": 0.42593878507614136, |
|
"learning_rate": 3.143895053378698e-08, |
|
"loss": 0.8974, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.904, |
|
"grad_norm": 0.3988572657108307, |
|
"learning_rate": 2.4076366639015914e-08, |
|
"loss": 0.7592, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.916, |
|
"grad_norm": 0.39131683111190796, |
|
"learning_rate": 1.769252941190458e-08, |
|
"loss": 0.7935, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.928, |
|
"grad_norm": 0.39934051036834717, |
|
"learning_rate": 1.2288694530769862e-08, |
|
"loss": 0.78, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.44333237409591675, |
|
"learning_rate": 7.865924910916977e-09, |
|
"loss": 0.8765, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.952, |
|
"grad_norm": 0.37822088599205017, |
|
"learning_rate": 4.4250904955656095e-09, |
|
"loss": 0.807, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.964, |
|
"grad_norm": 0.41814127564430237, |
|
"learning_rate": 1.9668680847356735e-09, |
|
"loss": 0.8633, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.976, |
|
"grad_norm": 0.36398470401763916, |
|
"learning_rate": 4.91741202124918e-10, |
|
"loss": 0.7087, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.988, |
|
"grad_norm": 0.47774454951286316, |
|
"learning_rate": 0.0, |
|
"loss": 0.9268, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.988, |
|
"step": 249, |
|
"total_flos": 248634998849536.0, |
|
"train_loss": 0.883572997817074, |
|
"train_runtime": 14151.8701, |
|
"train_samples_per_second": 1.696, |
|
"train_steps_per_second": 0.018 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 249, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 248634998849536.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |