GUI-Actor-Verifier-2B / trainer_state.json
qianhuiwu
Initial update for model weights.
71537da verified
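The JSON below is the Trainer state logged during fine-tuning: each `log_history` entry records the epoch, optimizer step, training loss, learning rate, and gradient norm at that point. A minimal sketch of how one might inspect it is shown here (it assumes the file has been downloaded locally as trainer_state.json; the field names are taken directly from the entries below):

    # Minimal sketch: load the trainer state and summarize the logged loss curve.
    import json

    with open("trainer_state.json") as f:          # assumed local path
        state = json.load(f)

    # Keep only entries that carry a training loss (all logging steps here do).
    logs = [e for e in state["log_history"] if "loss" in e]
    steps = [e["step"] for e in logs]
    losses = [e["loss"] for e in logs]

    print(f"logged points: {len(logs)}")
    print(f"first loss (step {steps[0]}): {losses[0]}")
    print(f"last loss (step {steps[-1]}): {losses[-1]}")

The same list can be fed to any plotting library to visualize how the loss drops sharply over the first few hundred steps and then flattens out.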
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.6114260238201389,
"eval_steps": 10000.0,
"global_step": 6000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0010190433730335648,
"grad_norm": 39.0,
"learning_rate": 6.779661016949153e-07,
"loss": 8.6035,
"step": 10
},
{
"epoch": 0.0020380867460671297,
"grad_norm": 36.5,
"learning_rate": 1.3559322033898307e-06,
"loss": 8.3336,
"step": 20
},
{
"epoch": 0.003057130119100694,
"grad_norm": 28.0,
"learning_rate": 2.033898305084746e-06,
"loss": 7.3424,
"step": 30
},
{
"epoch": 0.004076173492134259,
"grad_norm": 9.5,
"learning_rate": 2.7118644067796613e-06,
"loss": 5.2891,
"step": 40
},
{
"epoch": 0.005095216865167823,
"grad_norm": 7.625,
"learning_rate": 3.3898305084745763e-06,
"loss": 3.1259,
"step": 50
},
{
"epoch": 0.006114260238201388,
"grad_norm": 1.2265625,
"learning_rate": 4.067796610169492e-06,
"loss": 1.9534,
"step": 60
},
{
"epoch": 0.007133303611234953,
"grad_norm": 0.984375,
"learning_rate": 4.745762711864408e-06,
"loss": 1.7291,
"step": 70
},
{
"epoch": 0.008152346984268519,
"grad_norm": 0.88671875,
"learning_rate": 5.423728813559323e-06,
"loss": 1.6523,
"step": 80
},
{
"epoch": 0.009171390357302084,
"grad_norm": 0.80859375,
"learning_rate": 6.1016949152542385e-06,
"loss": 1.5714,
"step": 90
},
{
"epoch": 0.010190433730335647,
"grad_norm": 0.80078125,
"learning_rate": 6.779661016949153e-06,
"loss": 1.5387,
"step": 100
},
{
"epoch": 0.011209477103369211,
"grad_norm": 0.6953125,
"learning_rate": 7.4576271186440685e-06,
"loss": 1.5065,
"step": 110
},
{
"epoch": 0.012228520476402776,
"grad_norm": 0.69140625,
"learning_rate": 8.135593220338983e-06,
"loss": 1.469,
"step": 120
},
{
"epoch": 0.013247563849436341,
"grad_norm": 0.81640625,
"learning_rate": 8.8135593220339e-06,
"loss": 1.4463,
"step": 130
},
{
"epoch": 0.014266607222469906,
"grad_norm": 0.95703125,
"learning_rate": 9.491525423728815e-06,
"loss": 1.3843,
"step": 140
},
{
"epoch": 0.01528565059550347,
"grad_norm": 1.09375,
"learning_rate": 1.016949152542373e-05,
"loss": 1.3006,
"step": 150
},
{
"epoch": 0.016304693968537037,
"grad_norm": 1.1171875,
"learning_rate": 1.0847457627118645e-05,
"loss": 1.1446,
"step": 160
},
{
"epoch": 0.0173237373415706,
"grad_norm": 1.0859375,
"learning_rate": 1.1525423728813561e-05,
"loss": 0.9626,
"step": 170
},
{
"epoch": 0.018342780714604167,
"grad_norm": 1.2109375,
"learning_rate": 1.2203389830508477e-05,
"loss": 0.789,
"step": 180
},
{
"epoch": 0.01936182408763773,
"grad_norm": 1.0,
"learning_rate": 1.288135593220339e-05,
"loss": 0.5722,
"step": 190
},
{
"epoch": 0.020380867460671293,
"grad_norm": 0.8359375,
"learning_rate": 1.3559322033898305e-05,
"loss": 0.387,
"step": 200
},
{
"epoch": 0.02139991083370486,
"grad_norm": 0.6796875,
"learning_rate": 1.4237288135593221e-05,
"loss": 0.2307,
"step": 210
},
{
"epoch": 0.022418954206738423,
"grad_norm": 0.470703125,
"learning_rate": 1.4915254237288137e-05,
"loss": 0.1231,
"step": 220
},
{
"epoch": 0.02343799757977199,
"grad_norm": 0.27734375,
"learning_rate": 1.5593220338983053e-05,
"loss": 0.0755,
"step": 230
},
{
"epoch": 0.024457040952805553,
"grad_norm": 0.357421875,
"learning_rate": 1.6271186440677967e-05,
"loss": 0.0474,
"step": 240
},
{
"epoch": 0.02547608432583912,
"grad_norm": 0.4375,
"learning_rate": 1.694915254237288e-05,
"loss": 0.042,
"step": 250
},
{
"epoch": 0.026495127698872682,
"grad_norm": 0.228515625,
"learning_rate": 1.76271186440678e-05,
"loss": 0.0411,
"step": 260
},
{
"epoch": 0.02751417107190625,
"grad_norm": 0.123046875,
"learning_rate": 1.8305084745762713e-05,
"loss": 0.0303,
"step": 270
},
{
"epoch": 0.028533214444939812,
"grad_norm": 0.146484375,
"learning_rate": 1.898305084745763e-05,
"loss": 0.0381,
"step": 280
},
{
"epoch": 0.02955225781797338,
"grad_norm": 0.0947265625,
"learning_rate": 1.9661016949152545e-05,
"loss": 0.0315,
"step": 290
},
{
"epoch": 0.03057130119100694,
"grad_norm": 0.1005859375,
"learning_rate": 1.999998638184544e-05,
"loss": 0.0337,
"step": 300
},
{
"epoch": 0.03159034456404051,
"grad_norm": 0.11279296875,
"learning_rate": 1.999987743683151e-05,
"loss": 0.0331,
"step": 310
},
{
"epoch": 0.032609387937074075,
"grad_norm": 0.0849609375,
"learning_rate": 1.9999659547990556e-05,
"loss": 0.0243,
"step": 320
},
{
"epoch": 0.033628431310107634,
"grad_norm": 0.08447265625,
"learning_rate": 1.9999332717696364e-05,
"loss": 0.0315,
"step": 330
},
{
"epoch": 0.0346474746831412,
"grad_norm": 0.07568359375,
"learning_rate": 1.99988969495096e-05,
"loss": 0.0263,
"step": 340
},
{
"epoch": 0.03566651805617477,
"grad_norm": 0.1083984375,
"learning_rate": 1.9998352248177743e-05,
"loss": 0.0302,
"step": 350
},
{
"epoch": 0.036685561429208334,
"grad_norm": 0.13671875,
"learning_rate": 1.999769861963505e-05,
"loss": 0.0298,
"step": 360
},
{
"epoch": 0.037704604802241894,
"grad_norm": 0.078125,
"learning_rate": 1.999693607100249e-05,
"loss": 0.0248,
"step": 370
},
{
"epoch": 0.03872364817527546,
"grad_norm": 0.10205078125,
"learning_rate": 1.9996064610587666e-05,
"loss": 0.0286,
"step": 380
},
{
"epoch": 0.03974269154830903,
"grad_norm": 0.10546875,
"learning_rate": 1.9995084247884704e-05,
"loss": 0.0254,
"step": 390
},
{
"epoch": 0.040761734921342586,
"grad_norm": 0.1474609375,
"learning_rate": 1.9993994993574196e-05,
"loss": 0.0267,
"step": 400
},
{
"epoch": 0.04178077829437615,
"grad_norm": 0.0869140625,
"learning_rate": 1.9992796859523033e-05,
"loss": 0.0261,
"step": 410
},
{
"epoch": 0.04279982166740972,
"grad_norm": 0.197265625,
"learning_rate": 1.9991489858784306e-05,
"loss": 0.0242,
"step": 420
},
{
"epoch": 0.043818865040443286,
"grad_norm": 0.126953125,
"learning_rate": 1.999007400559716e-05,
"loss": 0.0268,
"step": 430
},
{
"epoch": 0.044837908413476846,
"grad_norm": 0.1357421875,
"learning_rate": 1.9988549315386624e-05,
"loss": 0.0247,
"step": 440
},
{
"epoch": 0.04585695178651041,
"grad_norm": 0.271484375,
"learning_rate": 1.998691580476347e-05,
"loss": 0.0257,
"step": 450
},
{
"epoch": 0.04687599515954398,
"grad_norm": 0.11083984375,
"learning_rate": 1.9985173491523996e-05,
"loss": 0.0269,
"step": 460
},
{
"epoch": 0.047895038532577545,
"grad_norm": 0.06982421875,
"learning_rate": 1.9983322394649868e-05,
"loss": 0.0241,
"step": 470
},
{
"epoch": 0.048914081905611105,
"grad_norm": 0.10791015625,
"learning_rate": 1.9981362534307886e-05,
"loss": 0.0243,
"step": 480
},
{
"epoch": 0.04993312527864467,
"grad_norm": 0.05419921875,
"learning_rate": 1.9979293931849786e-05,
"loss": 0.023,
"step": 490
},
{
"epoch": 0.05095216865167824,
"grad_norm": 0.134765625,
"learning_rate": 1.9977116609811985e-05,
"loss": 0.0202,
"step": 500
},
{
"epoch": 0.051971212024711805,
"grad_norm": 0.16015625,
"learning_rate": 1.997483059191536e-05,
"loss": 0.0261,
"step": 510
},
{
"epoch": 0.052990255397745364,
"grad_norm": 0.06689453125,
"learning_rate": 1.9972435903064964e-05,
"loss": 0.0197,
"step": 520
},
{
"epoch": 0.05400929877077893,
"grad_norm": 0.05615234375,
"learning_rate": 1.9969932569349778e-05,
"loss": 0.0235,
"step": 530
},
{
"epoch": 0.0550283421438125,
"grad_norm": 0.095703125,
"learning_rate": 1.996732061804241e-05,
"loss": 0.0228,
"step": 540
},
{
"epoch": 0.056047385516846064,
"grad_norm": 0.07373046875,
"learning_rate": 1.9964600077598804e-05,
"loss": 0.0214,
"step": 550
},
{
"epoch": 0.057066428889879624,
"grad_norm": 0.07080078125,
"learning_rate": 1.9961770977657937e-05,
"loss": 0.0236,
"step": 560
},
{
"epoch": 0.05808547226291319,
"grad_norm": 0.061279296875,
"learning_rate": 1.9958833349041477e-05,
"loss": 0.02,
"step": 570
},
{
"epoch": 0.05910451563594676,
"grad_norm": 0.1572265625,
"learning_rate": 1.9955787223753474e-05,
"loss": 0.0264,
"step": 580
},
{
"epoch": 0.060123559008980317,
"grad_norm": 0.0576171875,
"learning_rate": 1.9952632634979984e-05,
"loss": 0.0231,
"step": 590
},
{
"epoch": 0.06114260238201388,
"grad_norm": 0.0673828125,
"learning_rate": 1.9949369617088728e-05,
"loss": 0.0206,
"step": 600
},
{
"epoch": 0.06216164575504745,
"grad_norm": 0.0576171875,
"learning_rate": 1.9945998205628706e-05,
"loss": 0.0238,
"step": 610
},
{
"epoch": 0.06318068912808102,
"grad_norm": 0.0498046875,
"learning_rate": 1.994251843732982e-05,
"loss": 0.0183,
"step": 620
},
{
"epoch": 0.06419973250111458,
"grad_norm": 0.06396484375,
"learning_rate": 1.9938930350102453e-05,
"loss": 0.0222,
"step": 630
},
{
"epoch": 0.06521877587414815,
"grad_norm": 0.10009765625,
"learning_rate": 1.993523398303709e-05,
"loss": 0.0232,
"step": 640
},
{
"epoch": 0.06623781924718171,
"grad_norm": 0.07177734375,
"learning_rate": 1.9931429376403854e-05,
"loss": 0.0226,
"step": 650
},
{
"epoch": 0.06725686262021527,
"grad_norm": 0.050537109375,
"learning_rate": 1.9927516571652096e-05,
"loss": 0.0229,
"step": 660
},
{
"epoch": 0.06827590599324884,
"grad_norm": 0.045166015625,
"learning_rate": 1.9923495611409933e-05,
"loss": 0.0184,
"step": 670
},
{
"epoch": 0.0692949493662824,
"grad_norm": 0.04638671875,
"learning_rate": 1.9919366539483776e-05,
"loss": 0.0236,
"step": 680
},
{
"epoch": 0.07031399273931596,
"grad_norm": 0.11376953125,
"learning_rate": 1.9915129400857872e-05,
"loss": 0.0214,
"step": 690
},
{
"epoch": 0.07133303611234953,
"grad_norm": 0.12060546875,
"learning_rate": 1.9910784241693796e-05,
"loss": 0.0188,
"step": 700
},
{
"epoch": 0.0723520794853831,
"grad_norm": 0.09423828125,
"learning_rate": 1.9906331109329952e-05,
"loss": 0.02,
"step": 710
},
{
"epoch": 0.07337112285841667,
"grad_norm": 0.043701171875,
"learning_rate": 1.9901770052281067e-05,
"loss": 0.0201,
"step": 720
},
{
"epoch": 0.07439016623145023,
"grad_norm": 0.068359375,
"learning_rate": 1.9897101120237644e-05,
"loss": 0.02,
"step": 730
},
{
"epoch": 0.07540920960448379,
"grad_norm": 0.1025390625,
"learning_rate": 1.9892324364065445e-05,
"loss": 0.02,
"step": 740
},
{
"epoch": 0.07642825297751736,
"grad_norm": 0.0859375,
"learning_rate": 1.9887439835804918e-05,
"loss": 0.0185,
"step": 750
},
{
"epoch": 0.07744729635055092,
"grad_norm": 0.064453125,
"learning_rate": 1.988244758867063e-05,
"loss": 0.0215,
"step": 760
},
{
"epoch": 0.07846633972358448,
"grad_norm": 0.08251953125,
"learning_rate": 1.9877347677050706e-05,
"loss": 0.018,
"step": 770
},
{
"epoch": 0.07948538309661805,
"grad_norm": 0.0546875,
"learning_rate": 1.987214015650621e-05,
"loss": 0.019,
"step": 780
},
{
"epoch": 0.08050442646965161,
"grad_norm": 0.0498046875,
"learning_rate": 1.9866825083770558e-05,
"loss": 0.023,
"step": 790
},
{
"epoch": 0.08152346984268517,
"grad_norm": 0.058837890625,
"learning_rate": 1.9861402516748897e-05,
"loss": 0.0176,
"step": 800
},
{
"epoch": 0.08254251321571875,
"grad_norm": 0.07861328125,
"learning_rate": 1.985587251451748e-05,
"loss": 0.0225,
"step": 810
},
{
"epoch": 0.0835615565887523,
"grad_norm": 0.05712890625,
"learning_rate": 1.9850235137322995e-05,
"loss": 0.018,
"step": 820
},
{
"epoch": 0.08458059996178588,
"grad_norm": 0.06396484375,
"learning_rate": 1.9844490446581945e-05,
"loss": 0.0217,
"step": 830
},
{
"epoch": 0.08559964333481944,
"grad_norm": 0.1083984375,
"learning_rate": 1.9838638504879955e-05,
"loss": 0.0187,
"step": 840
},
{
"epoch": 0.086618686707853,
"grad_norm": 0.0859375,
"learning_rate": 1.98326793759711e-05,
"loss": 0.0188,
"step": 850
},
{
"epoch": 0.08763773008088657,
"grad_norm": 0.07080078125,
"learning_rate": 1.9826613124777206e-05,
"loss": 0.0203,
"step": 860
},
{
"epoch": 0.08865677345392013,
"grad_norm": 0.12353515625,
"learning_rate": 1.9820439817387144e-05,
"loss": 0.0192,
"step": 870
},
{
"epoch": 0.08967581682695369,
"grad_norm": 0.05908203125,
"learning_rate": 1.981415952105611e-05,
"loss": 0.0184,
"step": 880
},
{
"epoch": 0.09069486019998727,
"grad_norm": 0.142578125,
"learning_rate": 1.98077723042049e-05,
"loss": 0.0203,
"step": 890
},
{
"epoch": 0.09171390357302082,
"grad_norm": 0.2255859375,
"learning_rate": 1.9801278236419147e-05,
"loss": 0.0193,
"step": 900
},
{
"epoch": 0.0927329469460544,
"grad_norm": 0.068359375,
"learning_rate": 1.9794677388448584e-05,
"loss": 0.0192,
"step": 910
},
{
"epoch": 0.09375199031908796,
"grad_norm": 0.1123046875,
"learning_rate": 1.9787969832206245e-05,
"loss": 0.0177,
"step": 920
},
{
"epoch": 0.09477103369212152,
"grad_norm": 0.09716796875,
"learning_rate": 1.978115564076772e-05,
"loss": 0.0205,
"step": 930
},
{
"epoch": 0.09579007706515509,
"grad_norm": 0.103515625,
"learning_rate": 1.9774234888370324e-05,
"loss": 0.0185,
"step": 940
},
{
"epoch": 0.09680912043818865,
"grad_norm": 0.267578125,
"learning_rate": 1.976720765041231e-05,
"loss": 0.0167,
"step": 950
},
{
"epoch": 0.09782816381122221,
"grad_norm": 0.059814453125,
"learning_rate": 1.9760074003452028e-05,
"loss": 0.0197,
"step": 960
},
{
"epoch": 0.09884720718425578,
"grad_norm": 0.0537109375,
"learning_rate": 1.9752834025207116e-05,
"loss": 0.0166,
"step": 970
},
{
"epoch": 0.09986625055728934,
"grad_norm": 0.068359375,
"learning_rate": 1.974548779455364e-05,
"loss": 0.021,
"step": 980
},
{
"epoch": 0.1008852939303229,
"grad_norm": 0.053955078125,
"learning_rate": 1.973803539152522e-05,
"loss": 0.0186,
"step": 990
},
{
"epoch": 0.10190433730335648,
"grad_norm": 0.10546875,
"learning_rate": 1.9730476897312187e-05,
"loss": 0.0163,
"step": 1000
},
{
"epoch": 0.10292338067639004,
"grad_norm": 0.11572265625,
"learning_rate": 1.9722812394260674e-05,
"loss": 0.0208,
"step": 1010
},
{
"epoch": 0.10394242404942361,
"grad_norm": 0.07373046875,
"learning_rate": 1.9715041965871743e-05,
"loss": 0.0179,
"step": 1020
},
{
"epoch": 0.10496146742245717,
"grad_norm": 0.06103515625,
"learning_rate": 1.970716569680044e-05,
"loss": 0.0182,
"step": 1030
},
{
"epoch": 0.10598051079549073,
"grad_norm": 0.05126953125,
"learning_rate": 1.9699183672854916e-05,
"loss": 0.0177,
"step": 1040
},
{
"epoch": 0.1069995541685243,
"grad_norm": 0.08642578125,
"learning_rate": 1.9691095980995455e-05,
"loss": 0.0175,
"step": 1050
},
{
"epoch": 0.10801859754155786,
"grad_norm": 0.0859375,
"learning_rate": 1.9682902709333552e-05,
"loss": 0.0198,
"step": 1060
},
{
"epoch": 0.10903764091459142,
"grad_norm": 0.09912109375,
"learning_rate": 1.967460394713093e-05,
"loss": 0.0169,
"step": 1070
},
{
"epoch": 0.110056684287625,
"grad_norm": 0.068359375,
"learning_rate": 1.96661997847986e-05,
"loss": 0.0179,
"step": 1080
},
{
"epoch": 0.11107572766065855,
"grad_norm": 0.125,
"learning_rate": 1.9657690313895834e-05,
"loss": 0.0188,
"step": 1090
},
{
"epoch": 0.11209477103369213,
"grad_norm": 0.09033203125,
"learning_rate": 1.9649075627129208e-05,
"loss": 0.0159,
"step": 1100
},
{
"epoch": 0.11311381440672569,
"grad_norm": 0.045654296875,
"learning_rate": 1.964035581835156e-05,
"loss": 0.0172,
"step": 1110
},
{
"epoch": 0.11413285777975925,
"grad_norm": 0.04541015625,
"learning_rate": 1.9631530982560996e-05,
"loss": 0.0183,
"step": 1120
},
{
"epoch": 0.11515190115279282,
"grad_norm": 0.05859375,
"learning_rate": 1.962260121589983e-05,
"loss": 0.0193,
"step": 1130
},
{
"epoch": 0.11617094452582638,
"grad_norm": 0.056884765625,
"learning_rate": 1.9613566615653546e-05,
"loss": 0.0193,
"step": 1140
},
{
"epoch": 0.11718998789885994,
"grad_norm": 0.103515625,
"learning_rate": 1.960442728024975e-05,
"loss": 0.0163,
"step": 1150
},
{
"epoch": 0.11820903127189351,
"grad_norm": 0.048828125,
"learning_rate": 1.9595183309257074e-05,
"loss": 0.0186,
"step": 1160
},
{
"epoch": 0.11922807464492707,
"grad_norm": 0.064453125,
"learning_rate": 1.958583480338411e-05,
"loss": 0.0181,
"step": 1170
},
{
"epoch": 0.12024711801796063,
"grad_norm": 0.0400390625,
"learning_rate": 1.957638186447831e-05,
"loss": 0.0187,
"step": 1180
},
{
"epoch": 0.1212661613909942,
"grad_norm": 0.055908203125,
"learning_rate": 1.956682459552487e-05,
"loss": 0.0195,
"step": 1190
},
{
"epoch": 0.12228520476402777,
"grad_norm": 0.048828125,
"learning_rate": 1.9557163100645612e-05,
"loss": 0.016,
"step": 1200
},
{
"epoch": 0.12330424813706134,
"grad_norm": 0.1044921875,
"learning_rate": 1.9547397485097846e-05,
"loss": 0.0194,
"step": 1210
},
{
"epoch": 0.1243232915100949,
"grad_norm": 0.05322265625,
"learning_rate": 1.953752785527323e-05,
"loss": 0.0162,
"step": 1220
},
{
"epoch": 0.12534233488312846,
"grad_norm": 0.06982421875,
"learning_rate": 1.9527554318696607e-05,
"loss": 0.0193,
"step": 1230
},
{
"epoch": 0.12636137825616203,
"grad_norm": 0.045654296875,
"learning_rate": 1.9517476984024838e-05,
"loss": 0.0171,
"step": 1240
},
{
"epoch": 0.1273804216291956,
"grad_norm": 0.08349609375,
"learning_rate": 1.95072959610456e-05,
"loss": 0.0158,
"step": 1250
},
{
"epoch": 0.12839946500222915,
"grad_norm": 0.042724609375,
"learning_rate": 1.949701136067622e-05,
"loss": 0.0211,
"step": 1260
},
{
"epoch": 0.12941850837526273,
"grad_norm": 0.06689453125,
"learning_rate": 1.948662329496244e-05,
"loss": 0.0168,
"step": 1270
},
{
"epoch": 0.1304375517482963,
"grad_norm": 0.06591796875,
"learning_rate": 1.9476131877077205e-05,
"loss": 0.0175,
"step": 1280
},
{
"epoch": 0.13145659512132984,
"grad_norm": 0.05078125,
"learning_rate": 1.9465537221319453e-05,
"loss": 0.0182,
"step": 1290
},
{
"epoch": 0.13247563849436342,
"grad_norm": 0.11474609375,
"learning_rate": 1.945483944311282e-05,
"loss": 0.0153,
"step": 1300
},
{
"epoch": 0.133494681867397,
"grad_norm": 0.04150390625,
"learning_rate": 1.944403865900443e-05,
"loss": 0.0211,
"step": 1310
},
{
"epoch": 0.13451372524043054,
"grad_norm": 0.0732421875,
"learning_rate": 1.9433134986663596e-05,
"loss": 0.0153,
"step": 1320
},
{
"epoch": 0.1355327686134641,
"grad_norm": 0.0703125,
"learning_rate": 1.9422128544880562e-05,
"loss": 0.0177,
"step": 1330
},
{
"epoch": 0.13655181198649768,
"grad_norm": 0.0517578125,
"learning_rate": 1.9411019453565174e-05,
"loss": 0.0187,
"step": 1340
},
{
"epoch": 0.13757085535953123,
"grad_norm": 0.07861328125,
"learning_rate": 1.9399807833745622e-05,
"loss": 0.0151,
"step": 1350
},
{
"epoch": 0.1385898987325648,
"grad_norm": 0.05615234375,
"learning_rate": 1.9388493807567067e-05,
"loss": 0.0183,
"step": 1360
},
{
"epoch": 0.13960894210559838,
"grad_norm": 0.087890625,
"learning_rate": 1.937707749829036e-05,
"loss": 0.0171,
"step": 1370
},
{
"epoch": 0.14062798547863192,
"grad_norm": 0.0546875,
"learning_rate": 1.936555903029066e-05,
"loss": 0.0158,
"step": 1380
},
{
"epoch": 0.1416470288516655,
"grad_norm": 0.057861328125,
"learning_rate": 1.935393852905611e-05,
"loss": 0.0183,
"step": 1390
},
{
"epoch": 0.14266607222469907,
"grad_norm": 0.11376953125,
"learning_rate": 1.9342216121186447e-05,
"loss": 0.0158,
"step": 1400
},
{
"epoch": 0.14368511559773262,
"grad_norm": 0.043212890625,
"learning_rate": 1.9330391934391635e-05,
"loss": 0.02,
"step": 1410
},
{
"epoch": 0.1447041589707662,
"grad_norm": 0.06201171875,
"learning_rate": 1.931846609749047e-05,
"loss": 0.0177,
"step": 1420
},
{
"epoch": 0.14572320234379976,
"grad_norm": 0.07421875,
"learning_rate": 1.9306438740409168e-05,
"loss": 0.0184,
"step": 1430
},
{
"epoch": 0.14674224571683334,
"grad_norm": 0.048095703125,
"learning_rate": 1.9294309994179972e-05,
"loss": 0.0172,
"step": 1440
},
{
"epoch": 0.14776128908986688,
"grad_norm": 0.05322265625,
"learning_rate": 1.9282079990939707e-05,
"loss": 0.0159,
"step": 1450
},
{
"epoch": 0.14878033246290046,
"grad_norm": 0.08984375,
"learning_rate": 1.9269748863928338e-05,
"loss": 0.0176,
"step": 1460
},
{
"epoch": 0.14979937583593403,
"grad_norm": 0.052978515625,
"learning_rate": 1.9257316747487528e-05,
"loss": 0.0169,
"step": 1470
},
{
"epoch": 0.15081841920896757,
"grad_norm": 0.05859375,
"learning_rate": 1.9244783777059175e-05,
"loss": 0.016,
"step": 1480
},
{
"epoch": 0.15183746258200115,
"grad_norm": 0.07763671875,
"learning_rate": 1.9232150089183925e-05,
"loss": 0.018,
"step": 1490
},
{
"epoch": 0.15285650595503472,
"grad_norm": 0.051513671875,
"learning_rate": 1.9219415821499697e-05,
"loss": 0.0169,
"step": 1500
},
{
"epoch": 0.15387554932806827,
"grad_norm": 0.060546875,
"learning_rate": 1.9206581112740175e-05,
"loss": 0.0199,
"step": 1510
},
{
"epoch": 0.15489459270110184,
"grad_norm": 0.052734375,
"learning_rate": 1.9193646102733307e-05,
"loss": 0.0166,
"step": 1520
},
{
"epoch": 0.15591363607413541,
"grad_norm": 0.06689453125,
"learning_rate": 1.918061093239976e-05,
"loss": 0.0192,
"step": 1530
},
{
"epoch": 0.15693267944716896,
"grad_norm": 0.0556640625,
"learning_rate": 1.916747574375142e-05,
"loss": 0.0187,
"step": 1540
},
{
"epoch": 0.15795172282020253,
"grad_norm": 0.04833984375,
"learning_rate": 1.9154240679889806e-05,
"loss": 0.0161,
"step": 1550
},
{
"epoch": 0.1589707661932361,
"grad_norm": 0.033447265625,
"learning_rate": 1.914090588500454e-05,
"loss": 0.0182,
"step": 1560
},
{
"epoch": 0.15998980956626965,
"grad_norm": 0.049560546875,
"learning_rate": 1.912747150437176e-05,
"loss": 0.0167,
"step": 1570
},
{
"epoch": 0.16100885293930323,
"grad_norm": 0.1748046875,
"learning_rate": 1.9113937684352536e-05,
"loss": 0.0194,
"step": 1580
},
{
"epoch": 0.1620278963123368,
"grad_norm": 0.04345703125,
"learning_rate": 1.91003045723913e-05,
"loss": 0.021,
"step": 1590
},
{
"epoch": 0.16304693968537035,
"grad_norm": 0.05029296875,
"learning_rate": 1.908657231701421e-05,
"loss": 0.0173,
"step": 1600
},
{
"epoch": 0.16406598305840392,
"grad_norm": 0.09619140625,
"learning_rate": 1.9072741067827542e-05,
"loss": 0.0208,
"step": 1610
},
{
"epoch": 0.1650850264314375,
"grad_norm": 0.061279296875,
"learning_rate": 1.9058810975516067e-05,
"loss": 0.0197,
"step": 1620
},
{
"epoch": 0.16610406980447107,
"grad_norm": 0.045654296875,
"learning_rate": 1.90447821918414e-05,
"loss": 0.0177,
"step": 1630
},
{
"epoch": 0.1671231131775046,
"grad_norm": 0.0615234375,
"learning_rate": 1.9030654869640356e-05,
"loss": 0.0185,
"step": 1640
},
{
"epoch": 0.16814215655053819,
"grad_norm": 0.125,
"learning_rate": 1.901642916282327e-05,
"loss": 0.0141,
"step": 1650
},
{
"epoch": 0.16916119992357176,
"grad_norm": 0.091796875,
"learning_rate": 1.900210522637234e-05,
"loss": 0.0212,
"step": 1660
},
{
"epoch": 0.1701802432966053,
"grad_norm": 0.04833984375,
"learning_rate": 1.8987683216339924e-05,
"loss": 0.0173,
"step": 1670
},
{
"epoch": 0.17119928666963888,
"grad_norm": 0.060546875,
"learning_rate": 1.897316328984684e-05,
"loss": 0.019,
"step": 1680
},
{
"epoch": 0.17221833004267245,
"grad_norm": 0.033935546875,
"learning_rate": 1.895854560508067e-05,
"loss": 0.0201,
"step": 1690
},
{
"epoch": 0.173237373415706,
"grad_norm": 0.049560546875,
"learning_rate": 1.8943830321294015e-05,
"loss": 0.0149,
"step": 1700
},
{
"epoch": 0.17425641678873957,
"grad_norm": 0.043212890625,
"learning_rate": 1.892901759880277e-05,
"loss": 0.0194,
"step": 1710
},
{
"epoch": 0.17527546016177314,
"grad_norm": 0.0556640625,
"learning_rate": 1.8914107598984385e-05,
"loss": 0.0156,
"step": 1720
},
{
"epoch": 0.1762945035348067,
"grad_norm": 0.083984375,
"learning_rate": 1.889910048427609e-05,
"loss": 0.0173,
"step": 1730
},
{
"epoch": 0.17731354690784026,
"grad_norm": 0.044921875,
"learning_rate": 1.8883996418173142e-05,
"loss": 0.019,
"step": 1740
},
{
"epoch": 0.17833259028087384,
"grad_norm": 0.08251953125,
"learning_rate": 1.886879556522704e-05,
"loss": 0.0167,
"step": 1750
},
{
"epoch": 0.17935163365390738,
"grad_norm": 0.043701171875,
"learning_rate": 1.8853498091043715e-05,
"loss": 0.0203,
"step": 1760
},
{
"epoch": 0.18037067702694096,
"grad_norm": 0.06396484375,
"learning_rate": 1.883810416228175e-05,
"loss": 0.0185,
"step": 1770
},
{
"epoch": 0.18138972039997453,
"grad_norm": 0.06982421875,
"learning_rate": 1.8822613946650555e-05,
"loss": 0.0178,
"step": 1780
},
{
"epoch": 0.18240876377300808,
"grad_norm": 0.080078125,
"learning_rate": 1.880702761290853e-05,
"loss": 0.0162,
"step": 1790
},
{
"epoch": 0.18342780714604165,
"grad_norm": 0.046875,
"learning_rate": 1.8791345330861244e-05,
"loss": 0.0172,
"step": 1800
},
{
"epoch": 0.18444685051907522,
"grad_norm": 0.0361328125,
"learning_rate": 1.8775567271359575e-05,
"loss": 0.0192,
"step": 1810
},
{
"epoch": 0.1854658938921088,
"grad_norm": 0.04345703125,
"learning_rate": 1.875969360629785e-05,
"loss": 0.0168,
"step": 1820
},
{
"epoch": 0.18648493726514234,
"grad_norm": 0.044677734375,
"learning_rate": 1.8743724508611962e-05,
"loss": 0.0155,
"step": 1830
},
{
"epoch": 0.18750398063817592,
"grad_norm": 0.040771484375,
"learning_rate": 1.872766015227751e-05,
"loss": 0.0174,
"step": 1840
},
{
"epoch": 0.1885230240112095,
"grad_norm": 0.050537109375,
"learning_rate": 1.871150071230789e-05,
"loss": 0.0162,
"step": 1850
},
{
"epoch": 0.18954206738424303,
"grad_norm": 0.036865234375,
"learning_rate": 1.8695246364752376e-05,
"loss": 0.019,
"step": 1860
},
{
"epoch": 0.1905611107572766,
"grad_norm": 0.05029296875,
"learning_rate": 1.8678897286694222e-05,
"loss": 0.016,
"step": 1870
},
{
"epoch": 0.19158015413031018,
"grad_norm": 0.0673828125,
"learning_rate": 1.8662453656248725e-05,
"loss": 0.0161,
"step": 1880
},
{
"epoch": 0.19259919750334373,
"grad_norm": 0.044921875,
"learning_rate": 1.8645915652561286e-05,
"loss": 0.0166,
"step": 1890
},
{
"epoch": 0.1936182408763773,
"grad_norm": 0.09814453125,
"learning_rate": 1.862928345580545e-05,
"loss": 0.0159,
"step": 1900
},
{
"epoch": 0.19463728424941087,
"grad_norm": 0.0908203125,
"learning_rate": 1.861255724718096e-05,
"loss": 0.019,
"step": 1910
},
{
"epoch": 0.19565632762244442,
"grad_norm": 0.040771484375,
"learning_rate": 1.8595737208911762e-05,
"loss": 0.0168,
"step": 1920
},
{
"epoch": 0.196675370995478,
"grad_norm": 0.05029296875,
"learning_rate": 1.857882352424404e-05,
"loss": 0.0169,
"step": 1930
},
{
"epoch": 0.19769441436851157,
"grad_norm": 0.068359375,
"learning_rate": 1.8561816377444206e-05,
"loss": 0.0197,
"step": 1940
},
{
"epoch": 0.1987134577415451,
"grad_norm": 0.03662109375,
"learning_rate": 1.8544715953796893e-05,
"loss": 0.0162,
"step": 1950
},
{
"epoch": 0.1997325011145787,
"grad_norm": 0.03466796875,
"learning_rate": 1.8527522439602946e-05,
"loss": 0.0217,
"step": 1960
},
{
"epoch": 0.20075154448761226,
"grad_norm": 0.03662109375,
"learning_rate": 1.851023602217739e-05,
"loss": 0.0164,
"step": 1970
},
{
"epoch": 0.2017705878606458,
"grad_norm": 0.04638671875,
"learning_rate": 1.849285688984737e-05,
"loss": 0.0176,
"step": 1980
},
{
"epoch": 0.20278963123367938,
"grad_norm": 0.053955078125,
"learning_rate": 1.847538523195013e-05,
"loss": 0.0201,
"step": 1990
},
{
"epoch": 0.20380867460671295,
"grad_norm": 0.0400390625,
"learning_rate": 1.8457821238830933e-05,
"loss": 0.0155,
"step": 2000
},
{
"epoch": 0.20482771797974653,
"grad_norm": 0.032958984375,
"learning_rate": 1.8440165101840985e-05,
"loss": 0.0177,
"step": 2010
},
{
"epoch": 0.20584676135278007,
"grad_norm": 0.06787109375,
"learning_rate": 1.8422417013335353e-05,
"loss": 0.0159,
"step": 2020
},
{
"epoch": 0.20686580472581365,
"grad_norm": 0.048828125,
"learning_rate": 1.8404577166670883e-05,
"loss": 0.0165,
"step": 2030
},
{
"epoch": 0.20788484809884722,
"grad_norm": 0.043701171875,
"learning_rate": 1.838664575620407e-05,
"loss": 0.0166,
"step": 2040
},
{
"epoch": 0.20890389147188076,
"grad_norm": 0.07568359375,
"learning_rate": 1.8368622977288955e-05,
"loss": 0.0151,
"step": 2050
},
{
"epoch": 0.20992293484491434,
"grad_norm": 0.06298828125,
"learning_rate": 1.8350509026274997e-05,
"loss": 0.0168,
"step": 2060
},
{
"epoch": 0.2109419782179479,
"grad_norm": 0.03564453125,
"learning_rate": 1.833230410050493e-05,
"loss": 0.0157,
"step": 2070
},
{
"epoch": 0.21196102159098146,
"grad_norm": 0.11572265625,
"learning_rate": 1.831400839831261e-05,
"loss": 0.0141,
"step": 2080
},
{
"epoch": 0.21298006496401503,
"grad_norm": 0.09326171875,
"learning_rate": 1.8295622119020867e-05,
"loss": 0.0179,
"step": 2090
},
{
"epoch": 0.2139991083370486,
"grad_norm": 0.10400390625,
"learning_rate": 1.8277145462939313e-05,
"loss": 0.0152,
"step": 2100
},
{
"epoch": 0.21501815171008215,
"grad_norm": 0.08154296875,
"learning_rate": 1.8258578631362182e-05,
"loss": 0.0187,
"step": 2110
},
{
"epoch": 0.21603719508311572,
"grad_norm": 0.04345703125,
"learning_rate": 1.823992182656612e-05,
"loss": 0.017,
"step": 2120
},
{
"epoch": 0.2170562384561493,
"grad_norm": 0.05908203125,
"learning_rate": 1.822117525180799e-05,
"loss": 0.016,
"step": 2130
},
{
"epoch": 0.21807528182918284,
"grad_norm": 0.05712890625,
"learning_rate": 1.8202339111322657e-05,
"loss": 0.017,
"step": 2140
},
{
"epoch": 0.21909432520221642,
"grad_norm": 0.0654296875,
"learning_rate": 1.8183413610320756e-05,
"loss": 0.0151,
"step": 2150
},
{
"epoch": 0.22011336857525,
"grad_norm": 0.03857421875,
"learning_rate": 1.8164398954986463e-05,
"loss": 0.0178,
"step": 2160
},
{
"epoch": 0.22113241194828354,
"grad_norm": 0.06396484375,
"learning_rate": 1.8145295352475258e-05,
"loss": 0.0183,
"step": 2170
},
{
"epoch": 0.2221514553213171,
"grad_norm": 0.06201171875,
"learning_rate": 1.812610301091164e-05,
"loss": 0.018,
"step": 2180
},
{
"epoch": 0.22317049869435068,
"grad_norm": 0.045654296875,
"learning_rate": 1.8106822139386886e-05,
"loss": 0.0188,
"step": 2190
},
{
"epoch": 0.22418954206738426,
"grad_norm": 0.0849609375,
"learning_rate": 1.808745294795677e-05,
"loss": 0.0152,
"step": 2200
},
{
"epoch": 0.2252085854404178,
"grad_norm": 0.057373046875,
"learning_rate": 1.8067995647639257e-05,
"loss": 0.0186,
"step": 2210
},
{
"epoch": 0.22622762881345138,
"grad_norm": 0.050537109375,
"learning_rate": 1.804845045041222e-05,
"loss": 0.0185,
"step": 2220
},
{
"epoch": 0.22724667218648495,
"grad_norm": 0.0537109375,
"learning_rate": 1.8028817569211137e-05,
"loss": 0.0161,
"step": 2230
},
{
"epoch": 0.2282657155595185,
"grad_norm": 0.064453125,
"learning_rate": 1.8009097217926736e-05,
"loss": 0.0178,
"step": 2240
},
{
"epoch": 0.22928475893255207,
"grad_norm": 0.055908203125,
"learning_rate": 1.7989289611402717e-05,
"loss": 0.0154,
"step": 2250
},
{
"epoch": 0.23030380230558564,
"grad_norm": 0.07080078125,
"learning_rate": 1.7969394965433365e-05,
"loss": 0.0181,
"step": 2260
},
{
"epoch": 0.2313228456786192,
"grad_norm": 0.037841796875,
"learning_rate": 1.7949413496761222e-05,
"loss": 0.0178,
"step": 2270
},
{
"epoch": 0.23234188905165276,
"grad_norm": 0.04541015625,
"learning_rate": 1.792934542307473e-05,
"loss": 0.017,
"step": 2280
},
{
"epoch": 0.23336093242468633,
"grad_norm": 0.06201171875,
"learning_rate": 1.7909190963005833e-05,
"loss": 0.0197,
"step": 2290
},
{
"epoch": 0.23437997579771988,
"grad_norm": 0.09228515625,
"learning_rate": 1.788895033612763e-05,
"loss": 0.0139,
"step": 2300
},
{
"epoch": 0.23539901917075345,
"grad_norm": 0.042724609375,
"learning_rate": 1.7868623762951957e-05,
"loss": 0.0172,
"step": 2310
},
{
"epoch": 0.23641806254378703,
"grad_norm": 0.03759765625,
"learning_rate": 1.7848211464927e-05,
"loss": 0.0162,
"step": 2320
},
{
"epoch": 0.23743710591682057,
"grad_norm": 0.036865234375,
"learning_rate": 1.7827713664434864e-05,
"loss": 0.017,
"step": 2330
},
{
"epoch": 0.23845614928985415,
"grad_norm": 0.040771484375,
"learning_rate": 1.7807130584789175e-05,
"loss": 0.0161,
"step": 2340
},
{
"epoch": 0.23947519266288772,
"grad_norm": 0.06103515625,
"learning_rate": 1.7786462450232624e-05,
"loss": 0.0135,
"step": 2350
},
{
"epoch": 0.24049423603592127,
"grad_norm": 0.043212890625,
"learning_rate": 1.7765709485934538e-05,
"loss": 0.0187,
"step": 2360
},
{
"epoch": 0.24151327940895484,
"grad_norm": 0.048828125,
"learning_rate": 1.7744871917988424e-05,
"loss": 0.0161,
"step": 2370
},
{
"epoch": 0.2425323227819884,
"grad_norm": 0.0693359375,
"learning_rate": 1.772394997340951e-05,
"loss": 0.0165,
"step": 2380
},
{
"epoch": 0.243551366155022,
"grad_norm": 0.051513671875,
"learning_rate": 1.7702943880132254e-05,
"loss": 0.0171,
"step": 2390
},
{
"epoch": 0.24457040952805553,
"grad_norm": 0.052978515625,
"learning_rate": 1.7681853867007884e-05,
"loss": 0.0164,
"step": 2400
},
{
"epoch": 0.2455894529010891,
"grad_norm": 0.045654296875,
"learning_rate": 1.7660680163801885e-05,
"loss": 0.0171,
"step": 2410
},
{
"epoch": 0.24660849627412268,
"grad_norm": 0.05517578125,
"learning_rate": 1.7639423001191517e-05,
"loss": 0.0182,
"step": 2420
},
{
"epoch": 0.24762753964715623,
"grad_norm": 0.07666015625,
"learning_rate": 1.7618082610763275e-05,
"loss": 0.0159,
"step": 2430
},
{
"epoch": 0.2486465830201898,
"grad_norm": 0.04345703125,
"learning_rate": 1.75966592250104e-05,
"loss": 0.0169,
"step": 2440
},
{
"epoch": 0.24966562639322337,
"grad_norm": 0.047119140625,
"learning_rate": 1.7575153077330305e-05,
"loss": 0.013,
"step": 2450
},
{
"epoch": 0.2506846697662569,
"grad_norm": 0.06591796875,
"learning_rate": 1.7553564402022073e-05,
"loss": 0.0181,
"step": 2460
},
{
"epoch": 0.25170371313929046,
"grad_norm": 0.046142578125,
"learning_rate": 1.753189343428387e-05,
"loss": 0.0186,
"step": 2470
},
{
"epoch": 0.25272275651232406,
"grad_norm": 0.06005859375,
"learning_rate": 1.7510140410210416e-05,
"loss": 0.0157,
"step": 2480
},
{
"epoch": 0.2537417998853576,
"grad_norm": 0.05029296875,
"learning_rate": 1.7488305566790372e-05,
"loss": 0.0162,
"step": 2490
},
{
"epoch": 0.2547608432583912,
"grad_norm": 0.042724609375,
"learning_rate": 1.7466389141903805e-05,
"loss": 0.013,
"step": 2500
},
{
"epoch": 0.25577988663142476,
"grad_norm": 0.056640625,
"learning_rate": 1.744439137431955e-05,
"loss": 0.0162,
"step": 2510
},
{
"epoch": 0.2567989300044583,
"grad_norm": 0.03173828125,
"learning_rate": 1.742231250369266e-05,
"loss": 0.0157,
"step": 2520
},
{
"epoch": 0.2578179733774919,
"grad_norm": 0.06689453125,
"learning_rate": 1.740015277056173e-05,
"loss": 0.0154,
"step": 2530
},
{
"epoch": 0.25883701675052545,
"grad_norm": 0.047607421875,
"learning_rate": 1.7377912416346348e-05,
"loss": 0.0181,
"step": 2540
},
{
"epoch": 0.259856060123559,
"grad_norm": 0.06298828125,
"learning_rate": 1.7355591683344405e-05,
"loss": 0.0126,
"step": 2550
},
{
"epoch": 0.2608751034965926,
"grad_norm": 0.08935546875,
"learning_rate": 1.7333190814729494e-05,
"loss": 0.0162,
"step": 2560
},
{
"epoch": 0.26189414686962614,
"grad_norm": 0.06298828125,
"learning_rate": 1.7310710054548235e-05,
"loss": 0.0178,
"step": 2570
},
{
"epoch": 0.2629131902426597,
"grad_norm": 0.0546875,
"learning_rate": 1.7288149647717637e-05,
"loss": 0.0166,
"step": 2580
},
{
"epoch": 0.2639322336156933,
"grad_norm": 0.05078125,
"learning_rate": 1.7265509840022425e-05,
"loss": 0.0163,
"step": 2590
},
{
"epoch": 0.26495127698872684,
"grad_norm": 0.07861328125,
"learning_rate": 1.7242790878112344e-05,
"loss": 0.0139,
"step": 2600
},
{
"epoch": 0.2659703203617604,
"grad_norm": 0.05078125,
"learning_rate": 1.72199930094995e-05,
"loss": 0.0182,
"step": 2610
},
{
"epoch": 0.266989363734794,
"grad_norm": 0.03857421875,
"learning_rate": 1.7197116482555634e-05,
"loss": 0.0155,
"step": 2620
},
{
"epoch": 0.26800840710782753,
"grad_norm": 0.038330078125,
"learning_rate": 1.7174161546509448e-05,
"loss": 0.0174,
"step": 2630
},
{
"epoch": 0.2690274504808611,
"grad_norm": 0.04150390625,
"learning_rate": 1.715112845144386e-05,
"loss": 0.0195,
"step": 2640
},
{
"epoch": 0.2700464938538947,
"grad_norm": 0.0595703125,
"learning_rate": 1.7128017448293298e-05,
"loss": 0.0149,
"step": 2650
},
{
"epoch": 0.2710655372269282,
"grad_norm": 0.0732421875,
"learning_rate": 1.7104828788840966e-05,
"loss": 0.0173,
"step": 2660
},
{
"epoch": 0.27208458059996177,
"grad_norm": 0.055419921875,
"learning_rate": 1.7081562725716086e-05,
"loss": 0.0167,
"step": 2670
},
{
"epoch": 0.27310362397299537,
"grad_norm": 0.04052734375,
"learning_rate": 1.7058219512391162e-05,
"loss": 0.0147,
"step": 2680
},
{
"epoch": 0.2741226673460289,
"grad_norm": 0.060546875,
"learning_rate": 1.7034799403179208e-05,
"loss": 0.0164,
"step": 2690
},
{
"epoch": 0.27514171071906246,
"grad_norm": 0.035400390625,
"learning_rate": 1.701130265323099e-05,
"loss": 0.0148,
"step": 2700
},
{
"epoch": 0.27616075409209606,
"grad_norm": 0.051513671875,
"learning_rate": 1.6987729518532224e-05,
"loss": 0.0185,
"step": 2710
},
{
"epoch": 0.2771797974651296,
"grad_norm": 0.0771484375,
"learning_rate": 1.696408025590081e-05,
"loss": 0.0153,
"step": 2720
},
{
"epoch": 0.27819884083816315,
"grad_norm": 0.034423828125,
"learning_rate": 1.6940355122984032e-05,
"loss": 0.0163,
"step": 2730
},
{
"epoch": 0.27921788421119675,
"grad_norm": 0.052978515625,
"learning_rate": 1.6916554378255726e-05,
"loss": 0.0167,
"step": 2740
},
{
"epoch": 0.2802369275842303,
"grad_norm": 0.051025390625,
"learning_rate": 1.6892678281013495e-05,
"loss": 0.0147,
"step": 2750
},
{
"epoch": 0.28125597095726385,
"grad_norm": 0.07763671875,
"learning_rate": 1.686872709137587e-05,
"loss": 0.0176,
"step": 2760
},
{
"epoch": 0.28227501433029745,
"grad_norm": 0.044677734375,
"learning_rate": 1.684470107027947e-05,
"loss": 0.0149,
"step": 2770
},
{
"epoch": 0.283294057703331,
"grad_norm": 0.0439453125,
"learning_rate": 1.6820600479476176e-05,
"loss": 0.0148,
"step": 2780
},
{
"epoch": 0.28431310107636454,
"grad_norm": 0.042236328125,
"learning_rate": 1.6796425581530266e-05,
"loss": 0.0164,
"step": 2790
},
{
"epoch": 0.28533214444939814,
"grad_norm": 0.03759765625,
"learning_rate": 1.6772176639815555e-05,
"loss": 0.0149,
"step": 2800
},
{
"epoch": 0.2863511878224317,
"grad_norm": 0.0849609375,
"learning_rate": 1.6747853918512534e-05,
"loss": 0.0184,
"step": 2810
},
{
"epoch": 0.28737023119546523,
"grad_norm": 0.03515625,
"learning_rate": 1.6723457682605488e-05,
"loss": 0.0148,
"step": 2820
},
{
"epoch": 0.28838927456849883,
"grad_norm": 0.07080078125,
"learning_rate": 1.6698988197879596e-05,
"loss": 0.0143,
"step": 2830
},
{
"epoch": 0.2894083179415324,
"grad_norm": 0.034423828125,
"learning_rate": 1.6674445730918066e-05,
"loss": 0.0171,
"step": 2840
},
{
"epoch": 0.2904273613145659,
"grad_norm": 0.03759765625,
"learning_rate": 1.6649830549099198e-05,
"loss": 0.0131,
"step": 2850
},
{
"epoch": 0.2914464046875995,
"grad_norm": 0.0419921875,
"learning_rate": 1.662514292059349e-05,
"loss": 0.0157,
"step": 2860
},
{
"epoch": 0.29246544806063307,
"grad_norm": 0.0634765625,
"learning_rate": 1.660038311436071e-05,
"loss": 0.0151,
"step": 2870
},
{
"epoch": 0.29348449143366667,
"grad_norm": 0.06591796875,
"learning_rate": 1.6575551400146974e-05,
"loss": 0.0138,
"step": 2880
},
{
"epoch": 0.2945035348067002,
"grad_norm": 0.02880859375,
"learning_rate": 1.6550648048481793e-05,
"loss": 0.0156,
"step": 2890
},
{
"epoch": 0.29552257817973376,
"grad_norm": 0.049560546875,
"learning_rate": 1.6525673330675133e-05,
"loss": 0.0149,
"step": 2900
},
{
"epoch": 0.29654162155276736,
"grad_norm": 0.052490234375,
"learning_rate": 1.6500627518814466e-05,
"loss": 0.0163,
"step": 2910
},
{
"epoch": 0.2975606649258009,
"grad_norm": 0.04052734375,
"learning_rate": 1.6475510885761793e-05,
"loss": 0.0167,
"step": 2920
},
{
"epoch": 0.29857970829883446,
"grad_norm": 0.078125,
"learning_rate": 1.6450323705150684e-05,
"loss": 0.0163,
"step": 2930
},
{
"epoch": 0.29959875167186806,
"grad_norm": 0.034423828125,
"learning_rate": 1.6425066251383285e-05,
"loss": 0.0163,
"step": 2940
},
{
"epoch": 0.3006177950449016,
"grad_norm": 0.06494140625,
"learning_rate": 1.6399738799627334e-05,
"loss": 0.0136,
"step": 2950
},
{
"epoch": 0.30163683841793515,
"grad_norm": 0.076171875,
"learning_rate": 1.6374341625813165e-05,
"loss": 0.018,
"step": 2960
},
{
"epoch": 0.30265588179096875,
"grad_norm": 0.11767578125,
"learning_rate": 1.6348875006630706e-05,
"loss": 0.0169,
"step": 2970
},
{
"epoch": 0.3036749251640023,
"grad_norm": 0.1015625,
"learning_rate": 1.6323339219526447e-05,
"loss": 0.0158,
"step": 2980
},
{
"epoch": 0.30469396853703584,
"grad_norm": 0.0341796875,
"learning_rate": 1.6297734542700438e-05,
"loss": 0.0173,
"step": 2990
},
{
"epoch": 0.30571301191006944,
"grad_norm": 0.06884765625,
"learning_rate": 1.6272061255103247e-05,
"loss": 0.012,
"step": 3000
},
{
"epoch": 0.306732055283103,
"grad_norm": 0.043701171875,
"learning_rate": 1.624631963643292e-05,
"loss": 0.0165,
"step": 3010
},
{
"epoch": 0.30775109865613653,
"grad_norm": 0.046142578125,
"learning_rate": 1.622050996713194e-05,
"loss": 0.0145,
"step": 3020
},
{
"epoch": 0.30877014202917014,
"grad_norm": 0.0791015625,
"learning_rate": 1.6194632528384167e-05,
"loss": 0.0144,
"step": 3030
},
{
"epoch": 0.3097891854022037,
"grad_norm": 0.07275390625,
"learning_rate": 1.616868760211178e-05,
"loss": 0.0157,
"step": 3040
},
{
"epoch": 0.3108082287752372,
"grad_norm": 0.07861328125,
"learning_rate": 1.6142675470972198e-05,
"loss": 0.0129,
"step": 3050
},
{
"epoch": 0.31182727214827083,
"grad_norm": 0.05224609375,
"learning_rate": 1.6116596418355007e-05,
"loss": 0.0186,
"step": 3060
},
{
"epoch": 0.3128463155213044,
"grad_norm": 0.037841796875,
"learning_rate": 1.6090450728378865e-05,
"loss": 0.0154,
"step": 3070
},
{
"epoch": 0.3138653588943379,
"grad_norm": 0.059326171875,
"learning_rate": 1.6064238685888424e-05,
"loss": 0.0168,
"step": 3080
},
{
"epoch": 0.3148844022673715,
"grad_norm": 0.039306640625,
"learning_rate": 1.6037960576451198e-05,
"loss": 0.0148,
"step": 3090
},
{
"epoch": 0.31590344564040507,
"grad_norm": 0.058837890625,
"learning_rate": 1.6011616686354478e-05,
"loss": 0.0148,
"step": 3100
},
{
"epoch": 0.3169224890134386,
"grad_norm": 0.0625,
"learning_rate": 1.5985207302602205e-05,
"loss": 0.0163,
"step": 3110
},
{
"epoch": 0.3179415323864722,
"grad_norm": 0.03466796875,
"learning_rate": 1.595873271291184e-05,
"loss": 0.0159,
"step": 3120
},
{
"epoch": 0.31896057575950576,
"grad_norm": 0.041748046875,
"learning_rate": 1.593219320571123e-05,
"loss": 0.0162,
"step": 3130
},
{
"epoch": 0.3199796191325393,
"grad_norm": 0.035400390625,
"learning_rate": 1.590558907013546e-05,
"loss": 0.0142,
"step": 3140
},
{
"epoch": 0.3209986625055729,
"grad_norm": 0.034912109375,
"learning_rate": 1.587892059602372e-05,
"loss": 0.0131,
"step": 3150
},
{
"epoch": 0.32201770587860645,
"grad_norm": 0.044921875,
"learning_rate": 1.5852188073916145e-05,
"loss": 0.0165,
"step": 3160
},
{
"epoch": 0.32303674925164,
"grad_norm": 0.042236328125,
"learning_rate": 1.5825391795050615e-05,
"loss": 0.0161,
"step": 3170
},
{
"epoch": 0.3240557926246736,
"grad_norm": 0.044189453125,
"learning_rate": 1.5798532051359628e-05,
"loss": 0.0135,
"step": 3180
},
{
"epoch": 0.32507483599770715,
"grad_norm": 0.04931640625,
"learning_rate": 1.577160913546711e-05,
"loss": 0.0153,
"step": 3190
},
{
"epoch": 0.3260938793707407,
"grad_norm": 0.0517578125,
"learning_rate": 1.574462334068519e-05,
"loss": 0.0133,
"step": 3200
},
{
"epoch": 0.3271129227437743,
"grad_norm": 0.050537109375,
"learning_rate": 1.5717574961011058e-05,
"loss": 0.0175,
"step": 3210
},
{
"epoch": 0.32813196611680784,
"grad_norm": 0.054931640625,
"learning_rate": 1.5690464291123716e-05,
"loss": 0.0147,
"step": 3220
},
{
"epoch": 0.3291510094898414,
"grad_norm": 0.08984375,
"learning_rate": 1.5663291626380805e-05,
"loss": 0.0133,
"step": 3230
},
{
"epoch": 0.330170052862875,
"grad_norm": 0.055419921875,
"learning_rate": 1.5636057262815357e-05,
"loss": 0.0155,
"step": 3240
},
{
"epoch": 0.33118909623590853,
"grad_norm": 0.02587890625,
"learning_rate": 1.560876149713259e-05,
"loss": 0.0132,
"step": 3250
},
{
"epoch": 0.33220813960894213,
"grad_norm": 0.059326171875,
"learning_rate": 1.558140462670667e-05,
"loss": 0.0181,
"step": 3260
},
{
"epoch": 0.3332271829819757,
"grad_norm": 0.0390625,
"learning_rate": 1.555398694957746e-05,
"loss": 0.0169,
"step": 3270
},
{
"epoch": 0.3342462263550092,
"grad_norm": 0.0361328125,
"learning_rate": 1.552650876444729e-05,
"loss": 0.0136,
"step": 3280
},
{
"epoch": 0.3352652697280428,
"grad_norm": 0.04638671875,
"learning_rate": 1.5498970370677694e-05,
"loss": 0.0164,
"step": 3290
},
{
"epoch": 0.33628431310107637,
"grad_norm": 0.06982421875,
"learning_rate": 1.547137206828615e-05,
"loss": 0.0122,
"step": 3300
},
{
"epoch": 0.3373033564741099,
"grad_norm": 0.07861328125,
"learning_rate": 1.544371415794281e-05,
"loss": 0.0167,
"step": 3310
},
{
"epoch": 0.3383223998471435,
"grad_norm": 0.06982421875,
"learning_rate": 1.541599694096723e-05,
"loss": 0.0174,
"step": 3320
},
{
"epoch": 0.33934144322017706,
"grad_norm": 0.08642578125,
"learning_rate": 1.538822071932508e-05,
"loss": 0.0146,
"step": 3330
},
{
"epoch": 0.3403604865932106,
"grad_norm": 0.03759765625,
"learning_rate": 1.5360385795624853e-05,
"loss": 0.0178,
"step": 3340
},
{
"epoch": 0.3413795299662442,
"grad_norm": 0.043701171875,
"learning_rate": 1.5332492473114584e-05,
"loss": 0.0139,
"step": 3350
},
{
"epoch": 0.34239857333927776,
"grad_norm": 0.047607421875,
"learning_rate": 1.530454105567852e-05,
"loss": 0.0154,
"step": 3360
},
{
"epoch": 0.3434176167123113,
"grad_norm": 0.0595703125,
"learning_rate": 1.5276531847833834e-05,
"loss": 0.0145,
"step": 3370
},
{
"epoch": 0.3444366600853449,
"grad_norm": 0.046875,
"learning_rate": 1.52484651547273e-05,
"loss": 0.0148,
"step": 3380
},
{
"epoch": 0.34545570345837845,
"grad_norm": 0.0361328125,
"learning_rate": 1.5220341282131963e-05,
"loss": 0.0183,
"step": 3390
},
{
"epoch": 0.346474746831412,
"grad_norm": 0.055908203125,
"learning_rate": 1.5192160536443798e-05,
"loss": 0.0142,
"step": 3400
},
{
"epoch": 0.3474937902044456,
"grad_norm": 0.047607421875,
"learning_rate": 1.5163923224678407e-05,
"loss": 0.0179,
"step": 3410
},
{
"epoch": 0.34851283357747914,
"grad_norm": 0.09326171875,
"learning_rate": 1.5135629654467639e-05,
"loss": 0.0154,
"step": 3420
},
{
"epoch": 0.3495318769505127,
"grad_norm": 0.0712890625,
"learning_rate": 1.5107280134056251e-05,
"loss": 0.0148,
"step": 3430
},
{
"epoch": 0.3505509203235463,
"grad_norm": 0.0732421875,
"learning_rate": 1.5078874972298554e-05,
"loss": 0.0157,
"step": 3440
},
{
"epoch": 0.35156996369657983,
"grad_norm": 0.05419921875,
"learning_rate": 1.5050414478655046e-05,
"loss": 0.0134,
"step": 3450
},
{
"epoch": 0.3525890070696134,
"grad_norm": 0.051025390625,
"learning_rate": 1.5021898963189037e-05,
"loss": 0.0159,
"step": 3460
},
{
"epoch": 0.353608050442647,
"grad_norm": 0.0625,
"learning_rate": 1.4993328736563267e-05,
"loss": 0.0154,
"step": 3470
},
{
"epoch": 0.3546270938156805,
"grad_norm": 0.048828125,
"learning_rate": 1.4964704110036541e-05,
"loss": 0.0148,
"step": 3480
},
{
"epoch": 0.3556461371887141,
"grad_norm": 0.045654296875,
"learning_rate": 1.4936025395460314e-05,
"loss": 0.016,
"step": 3490
},
{
"epoch": 0.3566651805617477,
"grad_norm": 0.07080078125,
"learning_rate": 1.490729290527531e-05,
"loss": 0.0126,
"step": 3500
},
{
"epoch": 0.3576842239347812,
"grad_norm": 0.0498046875,
"learning_rate": 1.4878506952508103e-05,
"loss": 0.0171,
"step": 3510
},
{
"epoch": 0.35870326730781477,
"grad_norm": 0.05029296875,
"learning_rate": 1.484966785076773e-05,
"loss": 0.016,
"step": 3520
},
{
"epoch": 0.35972231068084837,
"grad_norm": 0.03369140625,
"learning_rate": 1.4820775914242249e-05,
"loss": 0.015,
"step": 3530
},
{
"epoch": 0.3607413540538819,
"grad_norm": 0.06103515625,
"learning_rate": 1.4791831457695333e-05,
"loss": 0.0155,
"step": 3540
},
{
"epoch": 0.36176039742691546,
"grad_norm": 0.03955078125,
"learning_rate": 1.4762834796462832e-05,
"loss": 0.0143,
"step": 3550
},
{
"epoch": 0.36277944079994906,
"grad_norm": 0.05712890625,
"learning_rate": 1.4733786246449346e-05,
"loss": 0.0166,
"step": 3560
},
{
"epoch": 0.3637984841729826,
"grad_norm": 0.08740234375,
"learning_rate": 1.4704686124124767e-05,
"loss": 0.0151,
"step": 3570
},
{
"epoch": 0.36481752754601615,
"grad_norm": 0.06689453125,
"learning_rate": 1.4675534746520858e-05,
"loss": 0.016,
"step": 3580
},
{
"epoch": 0.36583657091904975,
"grad_norm": 0.06298828125,
"learning_rate": 1.4646332431227775e-05,
"loss": 0.0164,
"step": 3590
},
{
"epoch": 0.3668556142920833,
"grad_norm": 0.047607421875,
"learning_rate": 1.461707949639061e-05,
"loss": 0.0126,
"step": 3600
},
{
"epoch": 0.36787465766511684,
"grad_norm": 0.09619140625,
"learning_rate": 1.4587776260705937e-05,
"loss": 0.0174,
"step": 3610
},
{
"epoch": 0.36889370103815045,
"grad_norm": 0.03515625,
"learning_rate": 1.4558423043418337e-05,
"loss": 0.0165,
"step": 3620
},
{
"epoch": 0.369912744411184,
"grad_norm": 0.06787109375,
"learning_rate": 1.4529020164316913e-05,
"loss": 0.0137,
"step": 3630
},
{
"epoch": 0.3709317877842176,
"grad_norm": 0.04150390625,
"learning_rate": 1.4499567943731803e-05,
"loss": 0.017,
"step": 3640
},
{
"epoch": 0.37195083115725114,
"grad_norm": 0.06396484375,
"learning_rate": 1.4470066702530705e-05,
"loss": 0.014,
"step": 3650
},
{
"epoch": 0.3729698745302847,
"grad_norm": 0.0537109375,
"learning_rate": 1.4440516762115372e-05,
"loss": 0.0167,
"step": 3660
},
{
"epoch": 0.3739889179033183,
"grad_norm": 0.0380859375,
"learning_rate": 1.4410918444418106e-05,
"loss": 0.0151,
"step": 3670
},
{
"epoch": 0.37500796127635183,
"grad_norm": 0.10498046875,
"learning_rate": 1.4381272071898263e-05,
"loss": 0.0142,
"step": 3680
},
{
"epoch": 0.3760270046493854,
"grad_norm": 0.03515625,
"learning_rate": 1.4351577967538726e-05,
"loss": 0.0172,
"step": 3690
},
{
"epoch": 0.377046048022419,
"grad_norm": 0.057861328125,
"learning_rate": 1.4321836454842399e-05,
"loss": 0.0145,
"step": 3700
},
{
"epoch": 0.3780650913954525,
"grad_norm": 0.0498046875,
"learning_rate": 1.4292047857828672e-05,
"loss": 0.0167,
"step": 3710
},
{
"epoch": 0.37908413476848607,
"grad_norm": 0.060546875,
"learning_rate": 1.4262212501029904e-05,
"loss": 0.0157,
"step": 3720
},
{
"epoch": 0.38010317814151967,
"grad_norm": 0.04931640625,
"learning_rate": 1.4232330709487871e-05,
"loss": 0.0149,
"step": 3730
},
{
"epoch": 0.3811222215145532,
"grad_norm": 0.048095703125,
"learning_rate": 1.4202402808750235e-05,
"loss": 0.0184,
"step": 3740
},
{
"epoch": 0.38214126488758676,
"grad_norm": 0.05029296875,
"learning_rate": 1.4172429124866993e-05,
"loss": 0.0137,
"step": 3750
},
{
"epoch": 0.38316030826062036,
"grad_norm": 0.046875,
"learning_rate": 1.4142409984386937e-05,
"loss": 0.0167,
"step": 3760
},
{
"epoch": 0.3841793516336539,
"grad_norm": 0.052978515625,
"learning_rate": 1.411234571435408e-05,
"loss": 0.0146,
"step": 3770
},
{
"epoch": 0.38519839500668746,
"grad_norm": 0.043212890625,
"learning_rate": 1.4082236642304095e-05,
"loss": 0.0146,
"step": 3780
},
{
"epoch": 0.38621743837972106,
"grad_norm": 0.037109375,
"learning_rate": 1.4052083096260758e-05,
"loss": 0.0174,
"step": 3790
},
{
"epoch": 0.3872364817527546,
"grad_norm": 0.09912109375,
"learning_rate": 1.4021885404732367e-05,
"loss": 0.0129,
"step": 3800
},
{
"epoch": 0.38825552512578815,
"grad_norm": 0.09326171875,
"learning_rate": 1.3991643896708163e-05,
"loss": 0.0167,
"step": 3810
},
{
"epoch": 0.38927456849882175,
"grad_norm": 0.091796875,
"learning_rate": 1.3961358901654743e-05,
"loss": 0.0153,
"step": 3820
},
{
"epoch": 0.3902936118718553,
"grad_norm": 0.0771484375,
"learning_rate": 1.393103074951248e-05,
"loss": 0.0179,
"step": 3830
},
{
"epoch": 0.39131265524488884,
"grad_norm": 0.05224609375,
"learning_rate": 1.3900659770691918e-05,
"loss": 0.0165,
"step": 3840
},
{
"epoch": 0.39233169861792244,
"grad_norm": 0.0439453125,
"learning_rate": 1.3870246296070176e-05,
"loss": 0.013,
"step": 3850
},
{
"epoch": 0.393350741990956,
"grad_norm": 0.041015625,
"learning_rate": 1.3839790656987355e-05,
"loss": 0.0162,
"step": 3860
},
{
"epoch": 0.39436978536398953,
"grad_norm": 0.044189453125,
"learning_rate": 1.3809293185242903e-05,
"loss": 0.0158,
"step": 3870
},
{
"epoch": 0.39538882873702313,
"grad_norm": 0.048095703125,
"learning_rate": 1.3778754213092019e-05,
"loss": 0.0144,
"step": 3880
},
{
"epoch": 0.3964078721100567,
"grad_norm": 0.05322265625,
"learning_rate": 1.3748174073242035e-05,
"loss": 0.0153,
"step": 3890
},
{
"epoch": 0.3974269154830902,
"grad_norm": 0.052978515625,
"learning_rate": 1.3717553098848781e-05,
"loss": 0.0128,
"step": 3900
},
{
"epoch": 0.3984459588561238,
"grad_norm": 0.051513671875,
"learning_rate": 1.368689162351296e-05,
"loss": 0.0171,
"step": 3910
},
{
"epoch": 0.3994650022291574,
"grad_norm": 0.044921875,
"learning_rate": 1.3656189981276507e-05,
"loss": 0.0158,
"step": 3920
},
{
"epoch": 0.4004840456021909,
"grad_norm": 0.06591796875,
"learning_rate": 1.3625448506618967e-05,
"loss": 0.0147,
"step": 3930
},
{
"epoch": 0.4015030889752245,
"grad_norm": 0.0517578125,
"learning_rate": 1.3594667534453834e-05,
"loss": 0.0172,
"step": 3940
},
{
"epoch": 0.40252213234825807,
"grad_norm": 0.04833984375,
"learning_rate": 1.3563847400124906e-05,
"loss": 0.0149,
"step": 3950
},
{
"epoch": 0.4035411757212916,
"grad_norm": 0.055908203125,
"learning_rate": 1.353298843940264e-05,
"loss": 0.017,
"step": 3960
},
{
"epoch": 0.4045602190943252,
"grad_norm": 0.036865234375,
"learning_rate": 1.3502090988480483e-05,
"loss": 0.015,
"step": 3970
},
{
"epoch": 0.40557926246735876,
"grad_norm": 0.06591796875,
"learning_rate": 1.3471155383971215e-05,
"loss": 0.0145,
"step": 3980
},
{
"epoch": 0.4065983058403923,
"grad_norm": 0.052001953125,
"learning_rate": 1.344018196290328e-05,
"loss": 0.0156,
"step": 3990
},
{
"epoch": 0.4076173492134259,
"grad_norm": 0.035888671875,
"learning_rate": 1.340917106271712e-05,
"loss": 0.0147,
"step": 4000
},
{
"epoch": 0.40863639258645945,
"grad_norm": 0.0576171875,
"learning_rate": 1.337812302126149e-05,
"loss": 0.0156,
"step": 4010
},
{
"epoch": 0.40965543595949305,
"grad_norm": 0.047119140625,
"learning_rate": 1.3347038176789778e-05,
"loss": 0.0143,
"step": 4020
},
{
"epoch": 0.4106744793325266,
"grad_norm": 0.0400390625,
"learning_rate": 1.3315916867956329e-05,
"loss": 0.0153,
"step": 4030
},
{
"epoch": 0.41169352270556014,
"grad_norm": 0.0498046875,
"learning_rate": 1.3284759433812748e-05,
"loss": 0.0161,
"step": 4040
},
{
"epoch": 0.41271256607859375,
"grad_norm": 0.0546875,
"learning_rate": 1.3253566213804208e-05,
"loss": 0.0135,
"step": 4050
},
{
"epoch": 0.4137316094516273,
"grad_norm": 0.059326171875,
"learning_rate": 1.3222337547765743e-05,
"loss": 0.0152,
"step": 4060
},
{
"epoch": 0.41475065282466084,
"grad_norm": 0.047119140625,
"learning_rate": 1.3191073775918571e-05,
"loss": 0.0158,
"step": 4070
},
{
"epoch": 0.41576969619769444,
"grad_norm": 0.12060546875,
"learning_rate": 1.315977523886636e-05,
"loss": 0.0132,
"step": 4080
},
{
"epoch": 0.416788739570728,
"grad_norm": 0.07421875,
"learning_rate": 1.3128442277591522e-05,
"loss": 0.0162,
"step": 4090
},
{
"epoch": 0.41780778294376153,
"grad_norm": 0.083984375,
"learning_rate": 1.3097075233451523e-05,
"loss": 0.0142,
"step": 4100
},
{
"epoch": 0.41882682631679513,
"grad_norm": 0.08349609375,
"learning_rate": 1.3065674448175129e-05,
"loss": 0.0179,
"step": 4110
},
{
"epoch": 0.4198458696898287,
"grad_norm": 0.046630859375,
"learning_rate": 1.3034240263858706e-05,
"loss": 0.0161,
"step": 4120
},
{
"epoch": 0.4208649130628622,
"grad_norm": 0.038330078125,
"learning_rate": 1.3002773022962485e-05,
"loss": 0.0143,
"step": 4130
},
{
"epoch": 0.4218839564358958,
"grad_norm": 0.03662109375,
"learning_rate": 1.297127306830683e-05,
"loss": 0.0161,
"step": 4140
},
{
"epoch": 0.42290299980892937,
"grad_norm": 0.060302734375,
"learning_rate": 1.2939740743068515e-05,
"loss": 0.0134,
"step": 4150
},
{
"epoch": 0.4239220431819629,
"grad_norm": 0.0595703125,
"learning_rate": 1.2908176390776958e-05,
"loss": 0.0193,
"step": 4160
},
{
"epoch": 0.4249410865549965,
"grad_norm": 0.035400390625,
"learning_rate": 1.287658035531052e-05,
"loss": 0.0163,
"step": 4170
},
{
"epoch": 0.42596012992803006,
"grad_norm": 0.039306640625,
"learning_rate": 1.2844952980892715e-05,
"loss": 0.0151,
"step": 4180
},
{
"epoch": 0.4269791733010636,
"grad_norm": 0.048095703125,
"learning_rate": 1.2813294612088485e-05,
"loss": 0.0153,
"step": 4190
},
{
"epoch": 0.4279982166740972,
"grad_norm": 0.049072265625,
"learning_rate": 1.2781605593800449e-05,
"loss": 0.0125,
"step": 4200
},
{
"epoch": 0.42901726004713076,
"grad_norm": 0.05126953125,
"learning_rate": 1.2749886271265127e-05,
"loss": 0.0163,
"step": 4210
},
{
"epoch": 0.4300363034201643,
"grad_norm": 0.0390625,
"learning_rate": 1.2718136990049193e-05,
"loss": 0.0136,
"step": 4220
},
{
"epoch": 0.4310553467931979,
"grad_norm": 0.06494140625,
"learning_rate": 1.2686358096045706e-05,
"loss": 0.0147,
"step": 4230
},
{
"epoch": 0.43207439016623145,
"grad_norm": 0.0439453125,
"learning_rate": 1.2654549935470346e-05,
"loss": 0.0168,
"step": 4240
},
{
"epoch": 0.433093433539265,
"grad_norm": 0.05810546875,
"learning_rate": 1.2622712854857632e-05,
"loss": 0.012,
"step": 4250
},
{
"epoch": 0.4341124769122986,
"grad_norm": 0.06201171875,
"learning_rate": 1.2590847201057155e-05,
"loss": 0.0156,
"step": 4260
},
{
"epoch": 0.43513152028533214,
"grad_norm": 0.049072265625,
"learning_rate": 1.2558953321229799e-05,
"loss": 0.016,
"step": 4270
},
{
"epoch": 0.4361505636583657,
"grad_norm": 0.043212890625,
"learning_rate": 1.2527031562843954e-05,
"loss": 0.0161,
"step": 4280
},
{
"epoch": 0.4371696070313993,
"grad_norm": 0.07275390625,
"learning_rate": 1.2495082273671736e-05,
"loss": 0.0181,
"step": 4290
},
{
"epoch": 0.43818865040443283,
"grad_norm": 0.06787109375,
"learning_rate": 1.2463105801785196e-05,
"loss": 0.0128,
"step": 4300
},
{
"epoch": 0.4392076937774664,
"grad_norm": 0.0419921875,
"learning_rate": 1.2431102495552524e-05,
"loss": 0.0176,
"step": 4310
},
{
"epoch": 0.4402267371505,
"grad_norm": 0.057861328125,
"learning_rate": 1.2399072703634257e-05,
"loss": 0.0168,
"step": 4320
},
{
"epoch": 0.4412457805235335,
"grad_norm": 0.06982421875,
"learning_rate": 1.236701677497948e-05,
"loss": 0.0146,
"step": 4330
},
{
"epoch": 0.44226482389656707,
"grad_norm": 0.0693359375,
"learning_rate": 1.2334935058822042e-05,
"loss": 0.0156,
"step": 4340
},
{
"epoch": 0.4432838672696007,
"grad_norm": 0.03466796875,
"learning_rate": 1.2302827904676706e-05,
"loss": 0.0114,
"step": 4350
},
{
"epoch": 0.4443029106426342,
"grad_norm": 0.05126953125,
"learning_rate": 1.2270695662335393e-05,
"loss": 0.0162,
"step": 4360
},
{
"epoch": 0.44532195401566776,
"grad_norm": 0.043212890625,
"learning_rate": 1.223853868186333e-05,
"loss": 0.0152,
"step": 4370
},
{
"epoch": 0.44634099738870137,
"grad_norm": 0.099609375,
"learning_rate": 1.220635731359527e-05,
"loss": 0.0143,
"step": 4380
},
{
"epoch": 0.4473600407617349,
"grad_norm": 0.03564453125,
"learning_rate": 1.217415190813165e-05,
"loss": 0.0152,
"step": 4390
},
{
"epoch": 0.4483790841347685,
"grad_norm": 0.051513671875,
"learning_rate": 1.2141922816334779e-05,
"loss": 0.0144,
"step": 4400
},
{
"epoch": 0.44939812750780206,
"grad_norm": 0.050048828125,
"learning_rate": 1.2109670389325028e-05,
"loss": 0.0167,
"step": 4410
},
{
"epoch": 0.4504171708808356,
"grad_norm": 0.07275390625,
"learning_rate": 1.207739497847698e-05,
"loss": 0.0161,
"step": 4420
},
{
"epoch": 0.4514362142538692,
"grad_norm": 0.0498046875,
"learning_rate": 1.2045096935415623e-05,
"loss": 0.0135,
"step": 4430
},
{
"epoch": 0.45245525762690275,
"grad_norm": 0.04150390625,
"learning_rate": 1.2012776612012511e-05,
"loss": 0.0158,
"step": 4440
},
{
"epoch": 0.4534743009999363,
"grad_norm": 0.055908203125,
"learning_rate": 1.1980434360381935e-05,
"loss": 0.0154,
"step": 4450
},
{
"epoch": 0.4544933443729699,
"grad_norm": 0.05029296875,
"learning_rate": 1.1948070532877083e-05,
"loss": 0.0169,
"step": 4460
},
{
"epoch": 0.45551238774600344,
"grad_norm": 0.03955078125,
"learning_rate": 1.1915685482086187e-05,
"loss": 0.0169,
"step": 4470
},
{
"epoch": 0.456531431119037,
"grad_norm": 0.047607421875,
"learning_rate": 1.1883279560828722e-05,
"loss": 0.0134,
"step": 4480
},
{
"epoch": 0.4575504744920706,
"grad_norm": 0.038818359375,
"learning_rate": 1.1850853122151516e-05,
"loss": 0.0155,
"step": 4490
},
{
"epoch": 0.45856951786510414,
"grad_norm": 0.06005859375,
"learning_rate": 1.1818406519324933e-05,
"loss": 0.0119,
"step": 4500
},
{
"epoch": 0.4595885612381377,
"grad_norm": 0.055419921875,
"learning_rate": 1.1785940105839013e-05,
"loss": 0.0169,
"step": 4510
},
{
"epoch": 0.4606076046111713,
"grad_norm": 0.04150390625,
"learning_rate": 1.1753454235399625e-05,
"loss": 0.0156,
"step": 4520
},
{
"epoch": 0.46162664798420483,
"grad_norm": 0.038330078125,
"learning_rate": 1.1720949261924614e-05,
"loss": 0.0124,
"step": 4530
},
{
"epoch": 0.4626456913572384,
"grad_norm": 0.06396484375,
"learning_rate": 1.1688425539539936e-05,
"loss": 0.0149,
"step": 4540
},
{
"epoch": 0.463664734730272,
"grad_norm": 0.048828125,
"learning_rate": 1.1655883422575818e-05,
"loss": 0.0137,
"step": 4550
},
{
"epoch": 0.4646837781033055,
"grad_norm": 0.07861328125,
"learning_rate": 1.1623323265562876e-05,
"loss": 0.0165,
"step": 4560
},
{
"epoch": 0.46570282147633907,
"grad_norm": 0.06494140625,
"learning_rate": 1.1590745423228273e-05,
"loss": 0.0149,
"step": 4570
},
{
"epoch": 0.46672186484937267,
"grad_norm": 0.042236328125,
"learning_rate": 1.1558150250491843e-05,
"loss": 0.0147,
"step": 4580
},
{
"epoch": 0.4677409082224062,
"grad_norm": 0.03955078125,
"learning_rate": 1.1525538102462217e-05,
"loss": 0.0135,
"step": 4590
},
{
"epoch": 0.46875995159543976,
"grad_norm": 0.09521484375,
"learning_rate": 1.1492909334432978e-05,
"loss": 0.0137,
"step": 4600
},
{
"epoch": 0.46977899496847336,
"grad_norm": 0.06640625,
"learning_rate": 1.1460264301878762e-05,
"loss": 0.016,
"step": 4610
},
{
"epoch": 0.4707980383415069,
"grad_norm": 0.06396484375,
"learning_rate": 1.1427603360451415e-05,
"loss": 0.0146,
"step": 4620
},
{
"epoch": 0.47181708171454045,
"grad_norm": 0.06982421875,
"learning_rate": 1.1394926865976088e-05,
"loss": 0.0141,
"step": 4630
},
{
"epoch": 0.47283612508757406,
"grad_norm": 0.05810546875,
"learning_rate": 1.1362235174447378e-05,
"loss": 0.0164,
"step": 4640
},
{
"epoch": 0.4738551684606076,
"grad_norm": 0.0400390625,
"learning_rate": 1.1329528642025453e-05,
"loss": 0.0132,
"step": 4650
},
{
"epoch": 0.47487421183364115,
"grad_norm": 0.0556640625,
"learning_rate": 1.1296807625032162e-05,
"loss": 0.0177,
"step": 4660
},
{
"epoch": 0.47589325520667475,
"grad_norm": 0.03759765625,
"learning_rate": 1.1264072479947153e-05,
"loss": 0.0151,
"step": 4670
},
{
"epoch": 0.4769122985797083,
"grad_norm": 0.05224609375,
"learning_rate": 1.1231323563403994e-05,
"loss": 0.0133,
"step": 4680
},
{
"epoch": 0.47793134195274184,
"grad_norm": 0.060302734375,
"learning_rate": 1.1198561232186291e-05,
"loss": 0.0144,
"step": 4690
},
{
"epoch": 0.47895038532577544,
"grad_norm": 0.056640625,
"learning_rate": 1.1165785843223792e-05,
"loss": 0.0124,
"step": 4700
},
{
"epoch": 0.479969428698809,
"grad_norm": 0.060791015625,
"learning_rate": 1.1132997753588503e-05,
"loss": 0.0167,
"step": 4710
},
{
"epoch": 0.48098847207184253,
"grad_norm": 0.04931640625,
"learning_rate": 1.1100197320490802e-05,
"loss": 0.016,
"step": 4720
},
{
"epoch": 0.48200751544487613,
"grad_norm": 0.2216796875,
"learning_rate": 1.1067384901275538e-05,
"loss": 0.0129,
"step": 4730
},
{
"epoch": 0.4830265588179097,
"grad_norm": 0.0400390625,
"learning_rate": 1.1034560853418144e-05,
"loss": 0.0157,
"step": 4740
},
{
"epoch": 0.4840456021909432,
"grad_norm": 0.0654296875,
"learning_rate": 1.100172553452074e-05,
"loss": 0.0116,
"step": 4750
},
{
"epoch": 0.4850646455639768,
"grad_norm": 0.0361328125,
"learning_rate": 1.0968879302308244e-05,
"loss": 0.0166,
"step": 4760
},
{
"epoch": 0.48608368893701037,
"grad_norm": 0.057861328125,
"learning_rate": 1.0936022514624464e-05,
"loss": 0.0141,
"step": 4770
},
{
"epoch": 0.487102732310044,
"grad_norm": 0.04345703125,
"learning_rate": 1.0903155529428207e-05,
"loss": 0.0152,
"step": 4780
},
{
"epoch": 0.4881217756830775,
"grad_norm": 0.0712890625,
"learning_rate": 1.087027870478938e-05,
"loss": 0.017,
"step": 4790
},
{
"epoch": 0.48914081905611106,
"grad_norm": 0.05224609375,
"learning_rate": 1.0837392398885078e-05,
"loss": 0.014,
"step": 4800
},
{
"epoch": 0.49015986242914467,
"grad_norm": 0.046875,
"learning_rate": 1.0804496969995694e-05,
"loss": 0.0172,
"step": 4810
},
{
"epoch": 0.4911789058021782,
"grad_norm": 0.03759765625,
"learning_rate": 1.0771592776501017e-05,
"loss": 0.0157,
"step": 4820
},
{
"epoch": 0.49219794917521176,
"grad_norm": 0.041259765625,
"learning_rate": 1.0738680176876315e-05,
"loss": 0.0149,
"step": 4830
},
{
"epoch": 0.49321699254824536,
"grad_norm": 0.061279296875,
"learning_rate": 1.0705759529688435e-05,
"loss": 0.0141,
"step": 4840
},
{
"epoch": 0.4942360359212789,
"grad_norm": 0.060302734375,
"learning_rate": 1.0672831193591905e-05,
"loss": 0.0147,
"step": 4850
},
{
"epoch": 0.49525507929431245,
"grad_norm": 0.035400390625,
"learning_rate": 1.0639895527325018e-05,
"loss": 0.0171,
"step": 4860
},
{
"epoch": 0.49627412266734605,
"grad_norm": 0.04345703125,
"learning_rate": 1.0606952889705922e-05,
"loss": 0.0137,
"step": 4870
},
{
"epoch": 0.4972931660403796,
"grad_norm": 0.08203125,
"learning_rate": 1.0574003639628718e-05,
"loss": 0.016,
"step": 4880
},
{
"epoch": 0.49831220941341314,
"grad_norm": 0.06982421875,
"learning_rate": 1.0541048136059547e-05,
"loss": 0.0158,
"step": 4890
},
{
"epoch": 0.49933125278644674,
"grad_norm": 0.049560546875,
"learning_rate": 1.0508086738032679e-05,
"loss": 0.014,
"step": 4900
},
{
"epoch": 0.5003502961594802,
"grad_norm": 0.053466796875,
"learning_rate": 1.0475119804646596e-05,
"loss": 0.0174,
"step": 4910
},
{
"epoch": 0.5013693395325138,
"grad_norm": 0.045166015625,
"learning_rate": 1.044214769506009e-05,
"loss": 0.0159,
"step": 4920
},
{
"epoch": 0.5023883829055474,
"grad_norm": 0.034912109375,
"learning_rate": 1.0409170768488344e-05,
"loss": 0.0139,
"step": 4930
},
{
"epoch": 0.5034074262785809,
"grad_norm": 0.04541015625,
"learning_rate": 1.0376189384199023e-05,
"loss": 0.0174,
"step": 4940
},
{
"epoch": 0.5044264696516145,
"grad_norm": 0.08154296875,
"learning_rate": 1.0343203901508346e-05,
"loss": 0.0143,
"step": 4950
},
{
"epoch": 0.5054455130246481,
"grad_norm": 0.05615234375,
"learning_rate": 1.0310214679777198e-05,
"loss": 0.0178,
"step": 4960
},
{
"epoch": 0.5064645563976817,
"grad_norm": 0.037109375,
"learning_rate": 1.0277222078407186e-05,
"loss": 0.0144,
"step": 4970
},
{
"epoch": 0.5074835997707152,
"grad_norm": 0.047607421875,
"learning_rate": 1.024422645683674e-05,
"loss": 0.0144,
"step": 4980
},
{
"epoch": 0.5085026431437488,
"grad_norm": 0.059326171875,
"learning_rate": 1.0211228174537195e-05,
"loss": 0.0158,
"step": 4990
},
{
"epoch": 0.5095216865167824,
"grad_norm": 0.06298828125,
"learning_rate": 1.0178227591008878e-05,
"loss": 0.0126,
"step": 5000
},
{
"epoch": 0.5105407298898159,
"grad_norm": 0.055908203125,
"learning_rate": 1.0145225065777178e-05,
"loss": 0.0153,
"step": 5010
},
{
"epoch": 0.5115597732628495,
"grad_norm": 0.035888671875,
"learning_rate": 1.011222095838864e-05,
"loss": 0.0155,
"step": 5020
},
{
"epoch": 0.5125788166358831,
"grad_norm": 0.07568359375,
"learning_rate": 1.0079215628407048e-05,
"loss": 0.0161,
"step": 5030
},
{
"epoch": 0.5135978600089166,
"grad_norm": 0.064453125,
"learning_rate": 1.0046209435409511e-05,
"loss": 0.0155,
"step": 5040
},
{
"epoch": 0.5146169033819502,
"grad_norm": 0.04150390625,
"learning_rate": 1.0013202738982527e-05,
"loss": 0.012,
"step": 5050
},
{
"epoch": 0.5156359467549838,
"grad_norm": 0.057373046875,
"learning_rate": 9.98019589871809e-06,
"loss": 0.0181,
"step": 5060
},
{
"epoch": 0.5166549901280173,
"grad_norm": 0.05712890625,
"learning_rate": 9.94718927420976e-06,
"loss": 0.0151,
"step": 5070
},
{
"epoch": 0.5176740335010509,
"grad_norm": 0.051025390625,
"learning_rate": 9.914183225048732e-06,
"loss": 0.0149,
"step": 5080
},
{
"epoch": 0.5186930768740845,
"grad_norm": 0.046142578125,
"learning_rate": 9.881178110819954e-06,
"loss": 0.0156,
"step": 5090
},
{
"epoch": 0.519712120247118,
"grad_norm": 0.051025390625,
"learning_rate": 9.848174291098175e-06,
"loss": 0.0126,
"step": 5100
},
{
"epoch": 0.5207311636201516,
"grad_norm": 0.0625,
"learning_rate": 9.815172125444052e-06,
"loss": 0.0173,
"step": 5110
},
{
"epoch": 0.5217502069931852,
"grad_norm": 0.052978515625,
"learning_rate": 9.7821719734002e-06,
"loss": 0.0156,
"step": 5120
},
{
"epoch": 0.5227692503662187,
"grad_norm": 0.068359375,
"learning_rate": 9.749174194487323e-06,
"loss": 0.0137,
"step": 5130
},
{
"epoch": 0.5237882937392523,
"grad_norm": 0.06640625,
"learning_rate": 9.716179148200252e-06,
"loss": 0.0162,
"step": 5140
},
{
"epoch": 0.5248073371122859,
"grad_norm": 0.0732421875,
"learning_rate": 9.683187194004062e-06,
"loss": 0.0137,
"step": 5150
},
{
"epoch": 0.5258263804853194,
"grad_norm": 0.060302734375,
"learning_rate": 9.650198691330123e-06,
"loss": 0.0159,
"step": 5160
},
{
"epoch": 0.526845423858353,
"grad_norm": 0.033447265625,
"learning_rate": 9.617213999572223e-06,
"loss": 0.0141,
"step": 5170
},
{
"epoch": 0.5278644672313866,
"grad_norm": 0.04052734375,
"learning_rate": 9.584233478082615e-06,
"loss": 0.015,
"step": 5180
},
{
"epoch": 0.5288835106044201,
"grad_norm": 0.04736328125,
"learning_rate": 9.551257486168128e-06,
"loss": 0.0157,
"step": 5190
},
{
"epoch": 0.5299025539774537,
"grad_norm": 0.045166015625,
"learning_rate": 9.518286383086238e-06,
"loss": 0.0124,
"step": 5200
},
{
"epoch": 0.5309215973504873,
"grad_norm": 0.037109375,
"learning_rate": 9.485320528041171e-06,
"loss": 0.0157,
"step": 5210
},
{
"epoch": 0.5319406407235208,
"grad_norm": 0.034423828125,
"learning_rate": 9.45236028017996e-06,
"loss": 0.0128,
"step": 5220
},
{
"epoch": 0.5329596840965544,
"grad_norm": 0.130859375,
"learning_rate": 9.419405998588565e-06,
"loss": 0.0154,
"step": 5230
},
{
"epoch": 0.533978727469588,
"grad_norm": 0.055419921875,
"learning_rate": 9.386458042287941e-06,
"loss": 0.0144,
"step": 5240
},
{
"epoch": 0.5349977708426215,
"grad_norm": 0.051513671875,
"learning_rate": 9.353516770230134e-06,
"loss": 0.0127,
"step": 5250
},
{
"epoch": 0.5360168142156551,
"grad_norm": 0.0888671875,
"learning_rate": 9.320582541294365e-06,
"loss": 0.0162,
"step": 5260
},
{
"epoch": 0.5370358575886887,
"grad_norm": 0.05078125,
"learning_rate": 9.287655714283126e-06,
"loss": 0.0141,
"step": 5270
},
{
"epoch": 0.5380549009617221,
"grad_norm": 0.045166015625,
"learning_rate": 9.254736647918266e-06,
"loss": 0.0137,
"step": 5280
},
{
"epoch": 0.5390739443347558,
"grad_norm": 0.03955078125,
"learning_rate": 9.221825700837097e-06,
"loss": 0.0159,
"step": 5290
},
{
"epoch": 0.5400929877077894,
"grad_norm": 0.05859375,
"learning_rate": 9.188923231588451e-06,
"loss": 0.0125,
"step": 5300
},
{
"epoch": 0.5411120310808228,
"grad_norm": 0.044677734375,
"learning_rate": 9.156029598628829e-06,
"loss": 0.0168,
"step": 5310
},
{
"epoch": 0.5421310744538564,
"grad_norm": 0.05322265625,
"learning_rate": 9.123145160318441e-06,
"loss": 0.0154,
"step": 5320
},
{
"epoch": 0.54315011782689,
"grad_norm": 0.03857421875,
"learning_rate": 9.090270274917331e-06,
"loss": 0.0161,
"step": 5330
},
{
"epoch": 0.5441691611999235,
"grad_norm": 0.034423828125,
"learning_rate": 9.05740530058148e-06,
"loss": 0.0146,
"step": 5340
},
{
"epoch": 0.5451882045729571,
"grad_norm": 0.041748046875,
"learning_rate": 9.024550595358874e-06,
"loss": 0.0124,
"step": 5350
},
{
"epoch": 0.5462072479459907,
"grad_norm": 0.07080078125,
"learning_rate": 8.991706517185641e-06,
"loss": 0.0172,
"step": 5360
},
{
"epoch": 0.5472262913190242,
"grad_norm": 0.08544921875,
"learning_rate": 8.958873423882118e-06,
"loss": 0.0158,
"step": 5370
},
{
"epoch": 0.5482453346920578,
"grad_norm": 0.0625,
"learning_rate": 8.92605167314898e-06,
"loss": 0.0146,
"step": 5380
},
{
"epoch": 0.5492643780650914,
"grad_norm": 0.0625,
"learning_rate": 8.893241622563314e-06,
"loss": 0.0173,
"step": 5390
},
{
"epoch": 0.5502834214381249,
"grad_norm": 0.047119140625,
"learning_rate": 8.860443629574755e-06,
"loss": 0.0113,
"step": 5400
},
{
"epoch": 0.5513024648111585,
"grad_norm": 0.040283203125,
"learning_rate": 8.827658051501565e-06,
"loss": 0.0166,
"step": 5410
},
{
"epoch": 0.5523215081841921,
"grad_norm": 0.0311279296875,
"learning_rate": 8.794885245526764e-06,
"loss": 0.0169,
"step": 5420
},
{
"epoch": 0.5533405515572256,
"grad_norm": 0.059814453125,
"learning_rate": 8.76212556869421e-06,
"loss": 0.0129,
"step": 5430
},
{
"epoch": 0.5543595949302592,
"grad_norm": 0.057373046875,
"learning_rate": 8.72937937790474e-06,
"loss": 0.0166,
"step": 5440
},
{
"epoch": 0.5553786383032928,
"grad_norm": 0.07373046875,
"learning_rate": 8.696647029912255e-06,
"loss": 0.0136,
"step": 5450
},
{
"epoch": 0.5563976816763263,
"grad_norm": 0.034423828125,
"learning_rate": 8.66392888131986e-06,
"loss": 0.0177,
"step": 5460
},
{
"epoch": 0.5574167250493599,
"grad_norm": 0.06689453125,
"learning_rate": 8.631225288575946e-06,
"loss": 0.0143,
"step": 5470
},
{
"epoch": 0.5584357684223935,
"grad_norm": 0.0546875,
"learning_rate": 8.598536607970345e-06,
"loss": 0.0144,
"step": 5480
},
{
"epoch": 0.559454811795427,
"grad_norm": 0.05029296875,
"learning_rate": 8.56586319563041e-06,
"loss": 0.016,
"step": 5490
},
{
"epoch": 0.5604738551684606,
"grad_norm": 0.0517578125,
"learning_rate": 8.533205407517167e-06,
"loss": 0.0129,
"step": 5500
},
{
"epoch": 0.5614928985414942,
"grad_norm": 0.047119140625,
"learning_rate": 8.500563599421413e-06,
"loss": 0.0157,
"step": 5510
},
{
"epoch": 0.5625119419145277,
"grad_norm": 0.078125,
"learning_rate": 8.467938126959867e-06,
"loss": 0.0148,
"step": 5520
},
{
"epoch": 0.5635309852875613,
"grad_norm": 0.055908203125,
"learning_rate": 8.435329345571256e-06,
"loss": 0.0156,
"step": 5530
},
{
"epoch": 0.5645500286605949,
"grad_norm": 0.038330078125,
"learning_rate": 8.402737610512484e-06,
"loss": 0.0151,
"step": 5540
},
{
"epoch": 0.5655690720336284,
"grad_norm": 0.05078125,
"learning_rate": 8.370163276854737e-06,
"loss": 0.015,
"step": 5550
},
{
"epoch": 0.566588115406662,
"grad_norm": 0.05908203125,
"learning_rate": 8.337606699479624e-06,
"loss": 0.0177,
"step": 5560
},
{
"epoch": 0.5676071587796956,
"grad_norm": 0.039794921875,
"learning_rate": 8.3050682330753e-06,
"loss": 0.0131,
"step": 5570
},
{
"epoch": 0.5686262021527291,
"grad_norm": 0.04541015625,
"learning_rate": 8.272548232132619e-06,
"loss": 0.0169,
"step": 5580
},
{
"epoch": 0.5696452455257627,
"grad_norm": 0.043701171875,
"learning_rate": 8.240047050941257e-06,
"loss": 0.0164,
"step": 5590
},
{
"epoch": 0.5706642888987963,
"grad_norm": 0.0849609375,
"learning_rate": 8.207565043585866e-06,
"loss": 0.013,
"step": 5600
},
{
"epoch": 0.5716833322718298,
"grad_norm": 0.033447265625,
"learning_rate": 8.17510256394219e-06,
"loss": 0.0148,
"step": 5610
},
{
"epoch": 0.5727023756448634,
"grad_norm": 0.0830078125,
"learning_rate": 8.142659965673256e-06,
"loss": 0.0146,
"step": 5620
},
{
"epoch": 0.573721419017897,
"grad_norm": 0.047607421875,
"learning_rate": 8.110237602225464e-06,
"loss": 0.0124,
"step": 5630
},
{
"epoch": 0.5747404623909305,
"grad_norm": 0.034423828125,
"learning_rate": 8.077835826824786e-06,
"loss": 0.0155,
"step": 5640
},
{
"epoch": 0.5757595057639641,
"grad_norm": 0.04541015625,
"learning_rate": 8.04545499247289e-06,
"loss": 0.0123,
"step": 5650
},
{
"epoch": 0.5767785491369977,
"grad_norm": 0.03515625,
"learning_rate": 8.013095451943297e-06,
"loss": 0.0161,
"step": 5660
},
{
"epoch": 0.5777975925100312,
"grad_norm": 0.05517578125,
"learning_rate": 7.980757557777549e-06,
"loss": 0.0141,
"step": 5670
},
{
"epoch": 0.5788166358830648,
"grad_norm": 0.07763671875,
"learning_rate": 7.948441662281359e-06,
"loss": 0.0137,
"step": 5680
},
{
"epoch": 0.5798356792560984,
"grad_norm": 0.06298828125,
"learning_rate": 7.916148117520776e-06,
"loss": 0.0154,
"step": 5690
},
{
"epoch": 0.5808547226291318,
"grad_norm": 0.037841796875,
"learning_rate": 7.883877275318345e-06,
"loss": 0.0129,
"step": 5700
},
{
"epoch": 0.5818737660021654,
"grad_norm": 0.08935546875,
"learning_rate": 7.851629487249281e-06,
"loss": 0.0163,
"step": 5710
},
{
"epoch": 0.582892809375199,
"grad_norm": 0.050048828125,
"learning_rate": 7.819405104637634e-06,
"loss": 0.014,
"step": 5720
},
{
"epoch": 0.5839118527482327,
"grad_norm": 0.0634765625,
"learning_rate": 7.787204478552468e-06,
"loss": 0.0134,
"step": 5730
},
{
"epoch": 0.5849308961212661,
"grad_norm": 0.060546875,
"learning_rate": 7.755027959804019e-06,
"loss": 0.0165,
"step": 5740
},
{
"epoch": 0.5859499394942997,
"grad_norm": 0.06298828125,
"learning_rate": 7.722875898939897e-06,
"loss": 0.0126,
"step": 5750
},
{
"epoch": 0.5869689828673333,
"grad_norm": 0.09423828125,
"learning_rate": 7.690748646241248e-06,
"loss": 0.0146,
"step": 5760
},
{
"epoch": 0.5879880262403668,
"grad_norm": 0.04052734375,
"learning_rate": 7.658646551718953e-06,
"loss": 0.0153,
"step": 5770
},
{
"epoch": 0.5890070696134004,
"grad_norm": 0.047119140625,
"learning_rate": 7.626569965109798e-06,
"loss": 0.0128,
"step": 5780
},
{
"epoch": 0.590026112986434,
"grad_norm": 0.0654296875,
"learning_rate": 7.59451923587268e-06,
"loss": 0.0149,
"step": 5790
},
{
"epoch": 0.5910451563594675,
"grad_norm": 0.06982421875,
"learning_rate": 7.5624947131847826e-06,
"loss": 0.0128,
"step": 5800
},
{
"epoch": 0.5920641997325011,
"grad_norm": 0.049072265625,
"learning_rate": 7.530496745937793e-06,
"loss": 0.0176,
"step": 5810
},
{
"epoch": 0.5930832431055347,
"grad_norm": 0.040283203125,
"learning_rate": 7.498525682734081e-06,
"loss": 0.0142,
"step": 5820
},
{
"epoch": 0.5941022864785682,
"grad_norm": 0.03564453125,
"learning_rate": 7.466581871882921e-06,
"loss": 0.0149,
"step": 5830
},
{
"epoch": 0.5951213298516018,
"grad_norm": 0.045654296875,
"learning_rate": 7.434665661396667e-06,
"loss": 0.0145,
"step": 5840
},
{
"epoch": 0.5961403732246354,
"grad_norm": 0.051025390625,
"learning_rate": 7.402777398987002e-06,
"loss": 0.0114,
"step": 5850
},
{
"epoch": 0.5971594165976689,
"grad_norm": 0.0634765625,
"learning_rate": 7.370917432061113e-06,
"loss": 0.0144,
"step": 5860
},
{
"epoch": 0.5981784599707025,
"grad_norm": 0.041748046875,
"learning_rate": 7.3390861077179335e-06,
"loss": 0.0136,
"step": 5870
},
{
"epoch": 0.5991975033437361,
"grad_norm": 0.052490234375,
"learning_rate": 7.307283772744334e-06,
"loss": 0.0133,
"step": 5880
},
{
"epoch": 0.6002165467167696,
"grad_norm": 0.046875,
"learning_rate": 7.275510773611374e-06,
"loss": 0.0137,
"step": 5890
},
{
"epoch": 0.6012355900898032,
"grad_norm": 0.0693359375,
"learning_rate": 7.243767456470505e-06,
"loss": 0.0113,
"step": 5900
},
{
"epoch": 0.6022546334628368,
"grad_norm": 0.041748046875,
"learning_rate": 7.21205416714982e-06,
"loss": 0.0149,
"step": 5910
},
{
"epoch": 0.6032736768358703,
"grad_norm": 0.06591796875,
"learning_rate": 7.180371251150249e-06,
"loss": 0.0132,
"step": 5920
},
{
"epoch": 0.6042927202089039,
"grad_norm": 0.046630859375,
"learning_rate": 7.1487190536418505e-06,
"loss": 0.0137,
"step": 5930
},
{
"epoch": 0.6053117635819375,
"grad_norm": 0.036376953125,
"learning_rate": 7.117097919459992e-06,
"loss": 0.0143,
"step": 5940
},
{
"epoch": 0.606330806954971,
"grad_norm": 0.0927734375,
"learning_rate": 7.085508193101639e-06,
"loss": 0.0129,
"step": 5950
},
{
"epoch": 0.6073498503280046,
"grad_norm": 0.0625,
"learning_rate": 7.053950218721575e-06,
"loss": 0.0173,
"step": 5960
},
{
"epoch": 0.6083688937010382,
"grad_norm": 0.03271484375,
"learning_rate": 7.02242434012867e-06,
"loss": 0.0142,
"step": 5970
},
{
"epoch": 0.6093879370740717,
"grad_norm": 0.061279296875,
"learning_rate": 6.990930900782112e-06,
"loss": 0.0138,
"step": 5980
},
{
"epoch": 0.6104069804471053,
"grad_norm": 0.047607421875,
"learning_rate": 6.95947024378769e-06,
"loss": 0.0145,
"step": 5990
},
{
"epoch": 0.6114260238201389,
"grad_norm": 0.05224609375,
"learning_rate": 6.928042711894047e-06,
"loss": 0.0122,
"step": 6000
}
],
"logging_steps": 10,
"max_steps": 9813,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.8788170935536976e+19,
"train_batch_size": 3,
"trial_name": null,
"trial_params": null
}