{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.957123699757167,
"eval_steps": 500,
"global_step": 13500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0014497481062665362,
"grad_norm": 1440.0,
"learning_rate": 3.6000000000000003e-06,
"loss": 4.5839,
"step": 10
},
{
"epoch": 0.0028994962125330724,
"grad_norm": 964.0,
"learning_rate": 7.600000000000001e-06,
"loss": 2.7285,
"step": 20
},
{
"epoch": 0.004349244318799609,
"grad_norm": 2224.0,
"learning_rate": 1.16e-05,
"loss": 1.881,
"step": 30
},
{
"epoch": 0.005798992425066145,
"grad_norm": 9216.0,
"learning_rate": 1.5600000000000003e-05,
"loss": 1.4348,
"step": 40
},
{
"epoch": 0.007248740531332681,
"grad_norm": 19584.0,
"learning_rate": 1.9600000000000002e-05,
"loss": 1.5585,
"step": 50
},
{
"epoch": 0.008698488637599217,
"grad_norm": 3.953125,
"learning_rate": 2.36e-05,
"loss": 1.0467,
"step": 60
},
{
"epoch": 0.010148236743865753,
"grad_norm": 1.296875,
"learning_rate": 2.76e-05,
"loss": 0.8164,
"step": 70
},
{
"epoch": 0.01159798485013229,
"grad_norm": 1.328125,
"learning_rate": 3.16e-05,
"loss": 0.7338,
"step": 80
},
{
"epoch": 0.013047732956398826,
"grad_norm": 180.0,
"learning_rate": 3.5600000000000005e-05,
"loss": 0.6724,
"step": 90
},
{
"epoch": 0.014497481062665362,
"grad_norm": 1.0859375,
"learning_rate": 3.96e-05,
"loss": 0.6735,
"step": 100
},
{
"epoch": 0.015947229168931897,
"grad_norm": 1.1953125,
"learning_rate": 4.360000000000001e-05,
"loss": 0.6787,
"step": 110
},
{
"epoch": 0.017396977275198434,
"grad_norm": 1.234375,
"learning_rate": 4.7600000000000005e-05,
"loss": 0.6796,
"step": 120
},
{
"epoch": 0.01884672538146497,
"grad_norm": 0.9765625,
"learning_rate": 5.160000000000001e-05,
"loss": 0.6814,
"step": 130
},
{
"epoch": 0.020296473487731507,
"grad_norm": 0.7578125,
"learning_rate": 5.56e-05,
"loss": 0.6821,
"step": 140
},
{
"epoch": 0.02174622159399804,
"grad_norm": 1.0390625,
"learning_rate": 5.9600000000000005e-05,
"loss": 0.7016,
"step": 150
},
{
"epoch": 0.02319596970026458,
"grad_norm": 0.9375,
"learning_rate": 6.360000000000001e-05,
"loss": 0.674,
"step": 160
},
{
"epoch": 0.024645717806531114,
"grad_norm": 0.8828125,
"learning_rate": 6.76e-05,
"loss": 0.6697,
"step": 170
},
{
"epoch": 0.02609546591279765,
"grad_norm": 0.8828125,
"learning_rate": 7.16e-05,
"loss": 0.6434,
"step": 180
},
{
"epoch": 0.027545214019064186,
"grad_norm": 6.90625,
"learning_rate": 7.560000000000001e-05,
"loss": 0.639,
"step": 190
},
{
"epoch": 0.028994962125330724,
"grad_norm": 0.87109375,
"learning_rate": 7.960000000000001e-05,
"loss": 0.6754,
"step": 200
},
{
"epoch": 0.03044471023159726,
"grad_norm": 0.88671875,
"learning_rate": 7.994782608695653e-05,
"loss": 0.648,
"step": 210
},
{
"epoch": 0.03189445833786379,
"grad_norm": 0.92578125,
"learning_rate": 7.988985507246377e-05,
"loss": 0.642,
"step": 220
},
{
"epoch": 0.03334420644413033,
"grad_norm": 0.88671875,
"learning_rate": 7.983188405797102e-05,
"loss": 0.6508,
"step": 230
},
{
"epoch": 0.03479395455039687,
"grad_norm": 0.8515625,
"learning_rate": 7.977391304347826e-05,
"loss": 0.6517,
"step": 240
},
{
"epoch": 0.03624370265666341,
"grad_norm": 0.671875,
"learning_rate": 7.971594202898552e-05,
"loss": 0.6262,
"step": 250
},
{
"epoch": 0.03769345076292994,
"grad_norm": 0.8125,
"learning_rate": 7.965797101449276e-05,
"loss": 0.65,
"step": 260
},
{
"epoch": 0.039143198869196476,
"grad_norm": 0.79296875,
"learning_rate": 7.960000000000001e-05,
"loss": 0.6624,
"step": 270
},
{
"epoch": 0.040592946975463014,
"grad_norm": 0.765625,
"learning_rate": 7.954202898550725e-05,
"loss": 0.5856,
"step": 280
},
{
"epoch": 0.04204269508172955,
"grad_norm": 0.67578125,
"learning_rate": 7.948405797101449e-05,
"loss": 0.573,
"step": 290
},
{
"epoch": 0.04349244318799608,
"grad_norm": 0.7578125,
"learning_rate": 7.942608695652174e-05,
"loss": 0.6033,
"step": 300
},
{
"epoch": 0.04494219129426262,
"grad_norm": 0.71484375,
"learning_rate": 7.9368115942029e-05,
"loss": 0.5779,
"step": 310
},
{
"epoch": 0.04639193940052916,
"grad_norm": 0.69140625,
"learning_rate": 7.931014492753624e-05,
"loss": 0.5705,
"step": 320
},
{
"epoch": 0.047841687506795696,
"grad_norm": 0.6796875,
"learning_rate": 7.925217391304349e-05,
"loss": 0.6448,
"step": 330
},
{
"epoch": 0.04929143561306223,
"grad_norm": 0.79296875,
"learning_rate": 7.919420289855073e-05,
"loss": 0.603,
"step": 340
},
{
"epoch": 0.050741183719328765,
"grad_norm": 0.7265625,
"learning_rate": 7.913623188405797e-05,
"loss": 0.6359,
"step": 350
},
{
"epoch": 0.0521909318255953,
"grad_norm": 0.8359375,
"learning_rate": 7.907826086956523e-05,
"loss": 0.5923,
"step": 360
},
{
"epoch": 0.05364067993186184,
"grad_norm": 0.7109375,
"learning_rate": 7.902028985507247e-05,
"loss": 0.6026,
"step": 370
},
{
"epoch": 0.05509042803812837,
"grad_norm": 1.5546875,
"learning_rate": 7.896231884057972e-05,
"loss": 0.5666,
"step": 380
},
{
"epoch": 0.05654017614439491,
"grad_norm": 0.609375,
"learning_rate": 7.890434782608697e-05,
"loss": 0.5933,
"step": 390
},
{
"epoch": 0.05798992425066145,
"grad_norm": 0.80859375,
"learning_rate": 7.884637681159421e-05,
"loss": 0.5926,
"step": 400
},
{
"epoch": 0.059439672356927986,
"grad_norm": 0.77734375,
"learning_rate": 7.878840579710145e-05,
"loss": 0.6175,
"step": 410
},
{
"epoch": 0.06088942046319452,
"grad_norm": 0.703125,
"learning_rate": 7.873043478260869e-05,
"loss": 0.573,
"step": 420
},
{
"epoch": 0.062339168569461055,
"grad_norm": 0.6484375,
"learning_rate": 7.867246376811595e-05,
"loss": 0.5003,
"step": 430
},
{
"epoch": 0.06378891667572759,
"grad_norm": 0.67578125,
"learning_rate": 7.86144927536232e-05,
"loss": 0.5835,
"step": 440
},
{
"epoch": 0.06523866478199412,
"grad_norm": 0.7265625,
"learning_rate": 7.855652173913044e-05,
"loss": 0.5603,
"step": 450
},
{
"epoch": 0.06668841288826066,
"grad_norm": 0.6640625,
"learning_rate": 7.84985507246377e-05,
"loss": 0.5692,
"step": 460
},
{
"epoch": 0.0681381609945272,
"grad_norm": 0.70703125,
"learning_rate": 7.844057971014493e-05,
"loss": 0.5619,
"step": 470
},
{
"epoch": 0.06958790910079374,
"grad_norm": 1.234375,
"learning_rate": 7.838260869565217e-05,
"loss": 0.5928,
"step": 480
},
{
"epoch": 0.07103765720706028,
"grad_norm": 0.77734375,
"learning_rate": 7.832463768115943e-05,
"loss": 0.5647,
"step": 490
},
{
"epoch": 0.07248740531332681,
"grad_norm": 0.75,
"learning_rate": 7.826666666666668e-05,
"loss": 0.5375,
"step": 500
},
{
"epoch": 0.07393715341959335,
"grad_norm": 0.85546875,
"learning_rate": 7.820869565217392e-05,
"loss": 0.5843,
"step": 510
},
{
"epoch": 0.07538690152585988,
"grad_norm": 0.71484375,
"learning_rate": 7.815072463768117e-05,
"loss": 0.5457,
"step": 520
},
{
"epoch": 0.07683664963212641,
"grad_norm": 0.6796875,
"learning_rate": 7.809275362318841e-05,
"loss": 0.5819,
"step": 530
},
{
"epoch": 0.07828639773839295,
"grad_norm": 0.7734375,
"learning_rate": 7.803478260869565e-05,
"loss": 0.5278,
"step": 540
},
{
"epoch": 0.07973614584465949,
"grad_norm": 0.7421875,
"learning_rate": 7.797681159420291e-05,
"loss": 0.5375,
"step": 550
},
{
"epoch": 0.08118589395092603,
"grad_norm": 0.62890625,
"learning_rate": 7.791884057971015e-05,
"loss": 0.4892,
"step": 560
},
{
"epoch": 0.08263564205719257,
"grad_norm": 0.76171875,
"learning_rate": 7.78608695652174e-05,
"loss": 0.5186,
"step": 570
},
{
"epoch": 0.0840853901634591,
"grad_norm": 0.63671875,
"learning_rate": 7.780289855072464e-05,
"loss": 0.5002,
"step": 580
},
{
"epoch": 0.08553513826972564,
"grad_norm": 0.80078125,
"learning_rate": 7.77449275362319e-05,
"loss": 0.518,
"step": 590
},
{
"epoch": 0.08698488637599217,
"grad_norm": 0.74609375,
"learning_rate": 7.768695652173914e-05,
"loss": 0.5225,
"step": 600
},
{
"epoch": 0.0884346344822587,
"grad_norm": 0.62890625,
"learning_rate": 7.762898550724638e-05,
"loss": 0.5111,
"step": 610
},
{
"epoch": 0.08988438258852524,
"grad_norm": 0.71484375,
"learning_rate": 7.757101449275363e-05,
"loss": 0.5046,
"step": 620
},
{
"epoch": 0.09133413069479178,
"grad_norm": 0.81640625,
"learning_rate": 7.751304347826088e-05,
"loss": 0.548,
"step": 630
},
{
"epoch": 0.09278387880105832,
"grad_norm": 0.8125,
"learning_rate": 7.745507246376812e-05,
"loss": 0.4748,
"step": 640
},
{
"epoch": 0.09423362690732486,
"grad_norm": 0.671875,
"learning_rate": 7.739710144927536e-05,
"loss": 0.5359,
"step": 650
},
{
"epoch": 0.09568337501359139,
"grad_norm": 0.77734375,
"learning_rate": 7.733913043478262e-05,
"loss": 0.4731,
"step": 660
},
{
"epoch": 0.09713312311985793,
"grad_norm": 0.70703125,
"learning_rate": 7.728115942028986e-05,
"loss": 0.5306,
"step": 670
},
{
"epoch": 0.09858287122612445,
"grad_norm": 0.73046875,
"learning_rate": 7.722318840579711e-05,
"loss": 0.4978,
"step": 680
},
{
"epoch": 0.10003261933239099,
"grad_norm": 0.75390625,
"learning_rate": 7.716521739130435e-05,
"loss": 0.5092,
"step": 690
},
{
"epoch": 0.10148236743865753,
"grad_norm": 0.69140625,
"learning_rate": 7.71072463768116e-05,
"loss": 0.5119,
"step": 700
},
{
"epoch": 0.10293211554492407,
"grad_norm": 0.609375,
"learning_rate": 7.704927536231884e-05,
"loss": 0.4871,
"step": 710
},
{
"epoch": 0.1043818636511906,
"grad_norm": 0.625,
"learning_rate": 7.699130434782608e-05,
"loss": 0.5444,
"step": 720
},
{
"epoch": 0.10583161175745714,
"grad_norm": 0.65234375,
"learning_rate": 7.693333333333334e-05,
"loss": 0.4837,
"step": 730
},
{
"epoch": 0.10728135986372368,
"grad_norm": 0.64453125,
"learning_rate": 7.687536231884059e-05,
"loss": 0.4788,
"step": 740
},
{
"epoch": 0.10873110796999022,
"grad_norm": 0.71484375,
"learning_rate": 7.681739130434783e-05,
"loss": 0.4905,
"step": 750
},
{
"epoch": 0.11018085607625674,
"grad_norm": 0.6640625,
"learning_rate": 7.675942028985508e-05,
"loss": 0.4732,
"step": 760
},
{
"epoch": 0.11163060418252328,
"grad_norm": 0.66015625,
"learning_rate": 7.670144927536232e-05,
"loss": 0.4817,
"step": 770
},
{
"epoch": 0.11308035228878982,
"grad_norm": 0.71875,
"learning_rate": 7.664347826086957e-05,
"loss": 0.4723,
"step": 780
},
{
"epoch": 0.11453010039505636,
"grad_norm": 0.6796875,
"learning_rate": 7.658550724637682e-05,
"loss": 0.5013,
"step": 790
},
{
"epoch": 0.1159798485013229,
"grad_norm": 0.7109375,
"learning_rate": 7.652753623188406e-05,
"loss": 0.4616,
"step": 800
},
{
"epoch": 0.11742959660758943,
"grad_norm": 0.6328125,
"learning_rate": 7.646956521739131e-05,
"loss": 0.4289,
"step": 810
},
{
"epoch": 0.11887934471385597,
"grad_norm": 0.73046875,
"learning_rate": 7.641159420289857e-05,
"loss": 0.4697,
"step": 820
},
{
"epoch": 0.12032909282012251,
"grad_norm": 0.6796875,
"learning_rate": 7.63536231884058e-05,
"loss": 0.4624,
"step": 830
},
{
"epoch": 0.12177884092638903,
"grad_norm": 0.7890625,
"learning_rate": 7.629565217391305e-05,
"loss": 0.4569,
"step": 840
},
{
"epoch": 0.12322858903265557,
"grad_norm": 0.74609375,
"learning_rate": 7.62376811594203e-05,
"loss": 0.4674,
"step": 850
},
{
"epoch": 0.12467833713892211,
"grad_norm": 0.73828125,
"learning_rate": 7.617971014492754e-05,
"loss": 0.4378,
"step": 860
},
{
"epoch": 0.12612808524518865,
"grad_norm": 0.84765625,
"learning_rate": 7.61217391304348e-05,
"loss": 0.4977,
"step": 870
},
{
"epoch": 0.12757783335145517,
"grad_norm": 0.73046875,
"learning_rate": 7.606376811594203e-05,
"loss": 0.4422,
"step": 880
},
{
"epoch": 0.12902758145772172,
"grad_norm": 0.671875,
"learning_rate": 7.600579710144929e-05,
"loss": 0.4379,
"step": 890
},
{
"epoch": 0.13047732956398825,
"grad_norm": 0.7734375,
"learning_rate": 7.594782608695653e-05,
"loss": 0.4527,
"step": 900
},
{
"epoch": 0.1319270776702548,
"grad_norm": 0.6171875,
"learning_rate": 7.588985507246377e-05,
"loss": 0.4622,
"step": 910
},
{
"epoch": 0.13337682577652132,
"grad_norm": 0.75,
"learning_rate": 7.583188405797102e-05,
"loss": 0.4478,
"step": 920
},
{
"epoch": 0.13482657388278788,
"grad_norm": 0.64453125,
"learning_rate": 7.577391304347827e-05,
"loss": 0.451,
"step": 930
},
{
"epoch": 0.1362763219890544,
"grad_norm": 0.6953125,
"learning_rate": 7.571594202898551e-05,
"loss": 0.4115,
"step": 940
},
{
"epoch": 0.13772607009532095,
"grad_norm": 0.65625,
"learning_rate": 7.565797101449277e-05,
"loss": 0.4325,
"step": 950
},
{
"epoch": 0.13917581820158748,
"grad_norm": 0.734375,
"learning_rate": 7.560000000000001e-05,
"loss": 0.4193,
"step": 960
},
{
"epoch": 0.140625566307854,
"grad_norm": 0.734375,
"learning_rate": 7.554202898550725e-05,
"loss": 0.4061,
"step": 970
},
{
"epoch": 0.14207531441412055,
"grad_norm": 0.75,
"learning_rate": 7.54840579710145e-05,
"loss": 0.4032,
"step": 980
},
{
"epoch": 0.14352506252038708,
"grad_norm": 0.6875,
"learning_rate": 7.542608695652174e-05,
"loss": 0.4529,
"step": 990
},
{
"epoch": 0.14497481062665363,
"grad_norm": 0.70703125,
"learning_rate": 7.5368115942029e-05,
"loss": 0.4376,
"step": 1000
},
{
"epoch": 0.14642455873292015,
"grad_norm": 0.625,
"learning_rate": 7.531014492753624e-05,
"loss": 0.4393,
"step": 1010
},
{
"epoch": 0.1478743068391867,
"grad_norm": 0.62109375,
"learning_rate": 7.525217391304349e-05,
"loss": 0.4205,
"step": 1020
},
{
"epoch": 0.14932405494545323,
"grad_norm": 0.62109375,
"learning_rate": 7.519420289855073e-05,
"loss": 0.431,
"step": 1030
},
{
"epoch": 0.15077380305171975,
"grad_norm": 0.6328125,
"learning_rate": 7.513623188405797e-05,
"loss": 0.423,
"step": 1040
},
{
"epoch": 0.1522235511579863,
"grad_norm": 0.69140625,
"learning_rate": 7.507826086956522e-05,
"loss": 0.4188,
"step": 1050
},
{
"epoch": 0.15367329926425283,
"grad_norm": 0.5703125,
"learning_rate": 7.502028985507248e-05,
"loss": 0.4175,
"step": 1060
},
{
"epoch": 0.15512304737051938,
"grad_norm": 0.76953125,
"learning_rate": 7.496231884057972e-05,
"loss": 0.4026,
"step": 1070
},
{
"epoch": 0.1565727954767859,
"grad_norm": 0.671875,
"learning_rate": 7.490434782608696e-05,
"loss": 0.3961,
"step": 1080
},
{
"epoch": 0.15802254358305245,
"grad_norm": 0.69921875,
"learning_rate": 7.484637681159421e-05,
"loss": 0.4139,
"step": 1090
},
{
"epoch": 0.15947229168931898,
"grad_norm": 0.75390625,
"learning_rate": 7.478840579710145e-05,
"loss": 0.416,
"step": 1100
},
{
"epoch": 0.16092203979558553,
"grad_norm": 0.7109375,
"learning_rate": 7.47304347826087e-05,
"loss": 0.4091,
"step": 1110
},
{
"epoch": 0.16237178790185205,
"grad_norm": 0.74609375,
"learning_rate": 7.467246376811596e-05,
"loss": 0.4193,
"step": 1120
},
{
"epoch": 0.16382153600811858,
"grad_norm": 0.66796875,
"learning_rate": 7.46144927536232e-05,
"loss": 0.3983,
"step": 1130
},
{
"epoch": 0.16527128411438513,
"grad_norm": 0.75,
"learning_rate": 7.455652173913044e-05,
"loss": 0.4066,
"step": 1140
},
{
"epoch": 0.16672103222065165,
"grad_norm": 0.6796875,
"learning_rate": 7.449855072463768e-05,
"loss": 0.3925,
"step": 1150
},
{
"epoch": 0.1681707803269182,
"grad_norm": 0.7109375,
"learning_rate": 7.444057971014493e-05,
"loss": 0.3824,
"step": 1160
},
{
"epoch": 0.16962052843318473,
"grad_norm": 0.6484375,
"learning_rate": 7.438260869565218e-05,
"loss": 0.425,
"step": 1170
},
{
"epoch": 0.17107027653945128,
"grad_norm": 0.83203125,
"learning_rate": 7.432463768115942e-05,
"loss": 0.4115,
"step": 1180
},
{
"epoch": 0.1725200246457178,
"grad_norm": 0.71875,
"learning_rate": 7.426666666666668e-05,
"loss": 0.4154,
"step": 1190
},
{
"epoch": 0.17396977275198433,
"grad_norm": 0.71484375,
"learning_rate": 7.420869565217392e-05,
"loss": 0.3967,
"step": 1200
},
{
"epoch": 0.17541952085825088,
"grad_norm": 0.7265625,
"learning_rate": 7.415072463768116e-05,
"loss": 0.4045,
"step": 1210
},
{
"epoch": 0.1768692689645174,
"grad_norm": 0.65625,
"learning_rate": 7.409275362318841e-05,
"loss": 0.3893,
"step": 1220
},
{
"epoch": 0.17831901707078396,
"grad_norm": 0.625,
"learning_rate": 7.403478260869565e-05,
"loss": 0.3825,
"step": 1230
},
{
"epoch": 0.17976876517705048,
"grad_norm": 0.87109375,
"learning_rate": 7.39768115942029e-05,
"loss": 0.4033,
"step": 1240
},
{
"epoch": 0.18121851328331703,
"grad_norm": 0.80859375,
"learning_rate": 7.391884057971016e-05,
"loss": 0.4061,
"step": 1250
},
{
"epoch": 0.18266826138958356,
"grad_norm": 0.609375,
"learning_rate": 7.38608695652174e-05,
"loss": 0.4118,
"step": 1260
},
{
"epoch": 0.18411800949585008,
"grad_norm": 0.65625,
"learning_rate": 7.380289855072464e-05,
"loss": 0.4136,
"step": 1270
},
{
"epoch": 0.18556775760211663,
"grad_norm": 0.6875,
"learning_rate": 7.374492753623189e-05,
"loss": 0.3825,
"step": 1280
},
{
"epoch": 0.18701750570838316,
"grad_norm": 0.6171875,
"learning_rate": 7.368695652173913e-05,
"loss": 0.3794,
"step": 1290
},
{
"epoch": 0.1884672538146497,
"grad_norm": 0.59765625,
"learning_rate": 7.362898550724639e-05,
"loss": 0.3556,
"step": 1300
},
{
"epoch": 0.18991700192091623,
"grad_norm": 0.66015625,
"learning_rate": 7.357101449275363e-05,
"loss": 0.3851,
"step": 1310
},
{
"epoch": 0.19136675002718279,
"grad_norm": 0.69140625,
"learning_rate": 7.351304347826088e-05,
"loss": 0.3701,
"step": 1320
},
{
"epoch": 0.1928164981334493,
"grad_norm": 0.65625,
"learning_rate": 7.345507246376812e-05,
"loss": 0.3508,
"step": 1330
},
{
"epoch": 0.19426624623971586,
"grad_norm": 0.59765625,
"learning_rate": 7.339710144927536e-05,
"loss": 0.3786,
"step": 1340
},
{
"epoch": 0.19571599434598239,
"grad_norm": 0.75390625,
"learning_rate": 7.333913043478261e-05,
"loss": 0.3637,
"step": 1350
},
{
"epoch": 0.1971657424522489,
"grad_norm": 0.70703125,
"learning_rate": 7.328115942028987e-05,
"loss": 0.3754,
"step": 1360
},
{
"epoch": 0.19861549055851546,
"grad_norm": 0.6796875,
"learning_rate": 7.322318840579711e-05,
"loss": 0.375,
"step": 1370
},
{
"epoch": 0.20006523866478199,
"grad_norm": 0.85546875,
"learning_rate": 7.316521739130436e-05,
"loss": 0.3964,
"step": 1380
},
{
"epoch": 0.20151498677104854,
"grad_norm": 0.74609375,
"learning_rate": 7.31072463768116e-05,
"loss": 0.3686,
"step": 1390
},
{
"epoch": 0.20296473487731506,
"grad_norm": 0.7265625,
"learning_rate": 7.304927536231884e-05,
"loss": 0.3624,
"step": 1400
},
{
"epoch": 0.2044144829835816,
"grad_norm": 0.75390625,
"learning_rate": 7.29913043478261e-05,
"loss": 0.3492,
"step": 1410
},
{
"epoch": 0.20586423108984814,
"grad_norm": 0.62890625,
"learning_rate": 7.293333333333334e-05,
"loss": 0.3608,
"step": 1420
},
{
"epoch": 0.20731397919611466,
"grad_norm": 0.70703125,
"learning_rate": 7.287536231884059e-05,
"loss": 0.3385,
"step": 1430
},
{
"epoch": 0.2087637273023812,
"grad_norm": 0.765625,
"learning_rate": 7.281739130434783e-05,
"loss": 0.3801,
"step": 1440
},
{
"epoch": 0.21021347540864774,
"grad_norm": 0.68359375,
"learning_rate": 7.275942028985508e-05,
"loss": 0.3507,
"step": 1450
},
{
"epoch": 0.2116632235149143,
"grad_norm": 0.56640625,
"learning_rate": 7.270144927536232e-05,
"loss": 0.3745,
"step": 1460
},
{
"epoch": 0.2131129716211808,
"grad_norm": 0.71484375,
"learning_rate": 7.264347826086958e-05,
"loss": 0.3395,
"step": 1470
},
{
"epoch": 0.21456271972744737,
"grad_norm": 0.68359375,
"learning_rate": 7.258550724637682e-05,
"loss": 0.3461,
"step": 1480
},
{
"epoch": 0.2160124678337139,
"grad_norm": 0.64453125,
"learning_rate": 7.252753623188407e-05,
"loss": 0.3482,
"step": 1490
},
{
"epoch": 0.21746221593998044,
"grad_norm": 0.7421875,
"learning_rate": 7.246956521739131e-05,
"loss": 0.3175,
"step": 1500
},
{
"epoch": 0.21891196404624697,
"grad_norm": 0.6796875,
"learning_rate": 7.241159420289855e-05,
"loss": 0.3555,
"step": 1510
},
{
"epoch": 0.2203617121525135,
"grad_norm": 0.60546875,
"learning_rate": 7.23536231884058e-05,
"loss": 0.3513,
"step": 1520
},
{
"epoch": 0.22181146025878004,
"grad_norm": 0.671875,
"learning_rate": 7.229565217391304e-05,
"loss": 0.3516,
"step": 1530
},
{
"epoch": 0.22326120836504657,
"grad_norm": 0.78125,
"learning_rate": 7.22376811594203e-05,
"loss": 0.3627,
"step": 1540
},
{
"epoch": 0.22471095647131312,
"grad_norm": 0.640625,
"learning_rate": 7.217971014492755e-05,
"loss": 0.3735,
"step": 1550
},
{
"epoch": 0.22616070457757964,
"grad_norm": 0.671875,
"learning_rate": 7.212173913043479e-05,
"loss": 0.3425,
"step": 1560
},
{
"epoch": 0.2276104526838462,
"grad_norm": 0.59765625,
"learning_rate": 7.206376811594203e-05,
"loss": 0.3678,
"step": 1570
},
{
"epoch": 0.22906020079011272,
"grad_norm": 0.859375,
"learning_rate": 7.200579710144927e-05,
"loss": 0.3584,
"step": 1580
},
{
"epoch": 0.23050994889637924,
"grad_norm": 0.6484375,
"learning_rate": 7.194782608695652e-05,
"loss": 0.3318,
"step": 1590
},
{
"epoch": 0.2319596970026458,
"grad_norm": 0.625,
"learning_rate": 7.188985507246378e-05,
"loss": 0.3209,
"step": 1600
},
{
"epoch": 0.23340944510891232,
"grad_norm": 0.640625,
"learning_rate": 7.183188405797102e-05,
"loss": 0.3257,
"step": 1610
},
{
"epoch": 0.23485919321517887,
"grad_norm": 0.921875,
"learning_rate": 7.177391304347827e-05,
"loss": 0.3687,
"step": 1620
},
{
"epoch": 0.2363089413214454,
"grad_norm": 0.6796875,
"learning_rate": 7.171594202898551e-05,
"loss": 0.3659,
"step": 1630
},
{
"epoch": 0.23775868942771194,
"grad_norm": 0.82421875,
"learning_rate": 7.165797101449275e-05,
"loss": 0.3294,
"step": 1640
},
{
"epoch": 0.23920843753397847,
"grad_norm": 0.67578125,
"learning_rate": 7.16e-05,
"loss": 0.3564,
"step": 1650
},
{
"epoch": 0.24065818564024502,
"grad_norm": 0.74609375,
"learning_rate": 7.154202898550725e-05,
"loss": 0.3773,
"step": 1660
},
{
"epoch": 0.24210793374651154,
"grad_norm": 0.78125,
"learning_rate": 7.14840579710145e-05,
"loss": 0.3211,
"step": 1670
},
{
"epoch": 0.24355768185277807,
"grad_norm": 0.73828125,
"learning_rate": 7.142608695652175e-05,
"loss": 0.3304,
"step": 1680
},
{
"epoch": 0.24500742995904462,
"grad_norm": 0.76953125,
"learning_rate": 7.136811594202899e-05,
"loss": 0.3374,
"step": 1690
},
{
"epoch": 0.24645717806531114,
"grad_norm": 0.77734375,
"learning_rate": 7.131014492753623e-05,
"loss": 0.3085,
"step": 1700
},
{
"epoch": 0.2479069261715777,
"grad_norm": 0.66015625,
"learning_rate": 7.125217391304349e-05,
"loss": 0.3083,
"step": 1710
},
{
"epoch": 0.24935667427784422,
"grad_norm": 0.76953125,
"learning_rate": 7.119420289855073e-05,
"loss": 0.3387,
"step": 1720
},
{
"epoch": 0.25080642238411077,
"grad_norm": 0.6328125,
"learning_rate": 7.113623188405798e-05,
"loss": 0.3419,
"step": 1730
},
{
"epoch": 0.2522561704903773,
"grad_norm": 0.59765625,
"learning_rate": 7.107826086956523e-05,
"loss": 0.3167,
"step": 1740
},
{
"epoch": 0.2537059185966438,
"grad_norm": 0.64453125,
"learning_rate": 7.102028985507247e-05,
"loss": 0.3177,
"step": 1750
},
{
"epoch": 0.25515566670291034,
"grad_norm": 0.65234375,
"learning_rate": 7.096231884057971e-05,
"loss": 0.3447,
"step": 1760
},
{
"epoch": 0.2566054148091769,
"grad_norm": 1.0,
"learning_rate": 7.090434782608695e-05,
"loss": 0.3327,
"step": 1770
},
{
"epoch": 0.25805516291544345,
"grad_norm": 0.7265625,
"learning_rate": 7.084637681159421e-05,
"loss": 0.3155,
"step": 1780
},
{
"epoch": 0.25950491102170997,
"grad_norm": 0.64453125,
"learning_rate": 7.078840579710146e-05,
"loss": 0.3513,
"step": 1790
},
{
"epoch": 0.2609546591279765,
"grad_norm": 0.67578125,
"learning_rate": 7.07304347826087e-05,
"loss": 0.302,
"step": 1800
},
{
"epoch": 0.2624044072342431,
"grad_norm": 0.625,
"learning_rate": 7.067246376811595e-05,
"loss": 0.3053,
"step": 1810
},
{
"epoch": 0.2638541553405096,
"grad_norm": 0.6171875,
"learning_rate": 7.06144927536232e-05,
"loss": 0.3099,
"step": 1820
},
{
"epoch": 0.2653039034467761,
"grad_norm": 0.59765625,
"learning_rate": 7.055652173913044e-05,
"loss": 0.3328,
"step": 1830
},
{
"epoch": 0.26675365155304265,
"grad_norm": 0.6328125,
"learning_rate": 7.049855072463769e-05,
"loss": 0.3067,
"step": 1840
},
{
"epoch": 0.26820339965930917,
"grad_norm": 0.67578125,
"learning_rate": 7.044057971014493e-05,
"loss": 0.3045,
"step": 1850
},
{
"epoch": 0.26965314776557575,
"grad_norm": 0.51953125,
"learning_rate": 7.038260869565218e-05,
"loss": 0.3315,
"step": 1860
},
{
"epoch": 0.2711028958718423,
"grad_norm": 0.66796875,
"learning_rate": 7.032463768115942e-05,
"loss": 0.3216,
"step": 1870
},
{
"epoch": 0.2725526439781088,
"grad_norm": 0.68359375,
"learning_rate": 7.026666666666668e-05,
"loss": 0.3272,
"step": 1880
},
{
"epoch": 0.2740023920843753,
"grad_norm": 0.62890625,
"learning_rate": 7.020869565217392e-05,
"loss": 0.3063,
"step": 1890
},
{
"epoch": 0.2754521401906419,
"grad_norm": 0.71875,
"learning_rate": 7.015072463768117e-05,
"loss": 0.3126,
"step": 1900
},
{
"epoch": 0.2769018882969084,
"grad_norm": 0.73828125,
"learning_rate": 7.009275362318841e-05,
"loss": 0.3176,
"step": 1910
},
{
"epoch": 0.27835163640317495,
"grad_norm": 0.73046875,
"learning_rate": 7.003478260869566e-05,
"loss": 0.2929,
"step": 1920
},
{
"epoch": 0.2798013845094415,
"grad_norm": 0.6015625,
"learning_rate": 6.99768115942029e-05,
"loss": 0.3042,
"step": 1930
},
{
"epoch": 0.281251132615708,
"grad_norm": 0.61328125,
"learning_rate": 6.991884057971014e-05,
"loss": 0.3078,
"step": 1940
},
{
"epoch": 0.2827008807219746,
"grad_norm": 0.62890625,
"learning_rate": 6.98608695652174e-05,
"loss": 0.2965,
"step": 1950
},
{
"epoch": 0.2841506288282411,
"grad_norm": 0.6953125,
"learning_rate": 6.980289855072464e-05,
"loss": 0.3117,
"step": 1960
},
{
"epoch": 0.2856003769345076,
"grad_norm": 0.7421875,
"learning_rate": 6.974492753623189e-05,
"loss": 0.308,
"step": 1970
},
{
"epoch": 0.28705012504077415,
"grad_norm": 0.5859375,
"learning_rate": 6.968695652173914e-05,
"loss": 0.3016,
"step": 1980
},
{
"epoch": 0.2884998731470407,
"grad_norm": 0.71875,
"learning_rate": 6.962898550724638e-05,
"loss": 0.3058,
"step": 1990
},
{
"epoch": 0.28994962125330725,
"grad_norm": 0.6796875,
"learning_rate": 6.957101449275362e-05,
"loss": 0.3192,
"step": 2000
},
{
"epoch": 0.2913993693595738,
"grad_norm": 0.52734375,
"learning_rate": 6.951304347826086e-05,
"loss": 0.2834,
"step": 2010
},
{
"epoch": 0.2928491174658403,
"grad_norm": 0.68359375,
"learning_rate": 6.945507246376812e-05,
"loss": 0.3191,
"step": 2020
},
{
"epoch": 0.2942988655721068,
"grad_norm": 0.734375,
"learning_rate": 6.939710144927537e-05,
"loss": 0.3105,
"step": 2030
},
{
"epoch": 0.2957486136783734,
"grad_norm": 0.78125,
"learning_rate": 6.933913043478261e-05,
"loss": 0.3297,
"step": 2040
},
{
"epoch": 0.29719836178463993,
"grad_norm": 0.6015625,
"learning_rate": 6.928115942028987e-05,
"loss": 0.2988,
"step": 2050
},
{
"epoch": 0.29864810989090645,
"grad_norm": 0.59765625,
"learning_rate": 6.92231884057971e-05,
"loss": 0.3087,
"step": 2060
},
{
"epoch": 0.300097857997173,
"grad_norm": 0.59375,
"learning_rate": 6.916521739130435e-05,
"loss": 0.2707,
"step": 2070
},
{
"epoch": 0.3015476061034395,
"grad_norm": 0.69140625,
"learning_rate": 6.91072463768116e-05,
"loss": 0.3002,
"step": 2080
},
{
"epoch": 0.3029973542097061,
"grad_norm": 0.70703125,
"learning_rate": 6.904927536231885e-05,
"loss": 0.2856,
"step": 2090
},
{
"epoch": 0.3044471023159726,
"grad_norm": 0.671875,
"learning_rate": 6.899130434782609e-05,
"loss": 0.3159,
"step": 2100
},
{
"epoch": 0.30589685042223913,
"grad_norm": 0.59375,
"learning_rate": 6.893333333333335e-05,
"loss": 0.3096,
"step": 2110
},
{
"epoch": 0.30734659852850565,
"grad_norm": 0.69921875,
"learning_rate": 6.887536231884059e-05,
"loss": 0.3024,
"step": 2120
},
{
"epoch": 0.30879634663477223,
"grad_norm": 0.5703125,
"learning_rate": 6.881739130434783e-05,
"loss": 0.3032,
"step": 2130
},
{
"epoch": 0.31024609474103876,
"grad_norm": 0.828125,
"learning_rate": 6.875942028985508e-05,
"loss": 0.2851,
"step": 2140
},
{
"epoch": 0.3116958428473053,
"grad_norm": 0.5390625,
"learning_rate": 6.870144927536232e-05,
"loss": 0.2906,
"step": 2150
},
{
"epoch": 0.3131455909535718,
"grad_norm": 0.58203125,
"learning_rate": 6.864347826086957e-05,
"loss": 0.3115,
"step": 2160
},
{
"epoch": 0.31459533905983833,
"grad_norm": 0.59765625,
"learning_rate": 6.858550724637683e-05,
"loss": 0.2729,
"step": 2170
},
{
"epoch": 0.3160450871661049,
"grad_norm": 0.734375,
"learning_rate": 6.852753623188407e-05,
"loss": 0.2871,
"step": 2180
},
{
"epoch": 0.31749483527237143,
"grad_norm": 0.59375,
"learning_rate": 6.846956521739131e-05,
"loss": 0.2958,
"step": 2190
},
{
"epoch": 0.31894458337863796,
"grad_norm": 0.5625,
"learning_rate": 6.841159420289855e-05,
"loss": 0.2736,
"step": 2200
},
{
"epoch": 0.3203943314849045,
"grad_norm": 0.75,
"learning_rate": 6.83536231884058e-05,
"loss": 0.3054,
"step": 2210
},
{
"epoch": 0.32184407959117106,
"grad_norm": 0.70703125,
"learning_rate": 6.829565217391305e-05,
"loss": 0.293,
"step": 2220
},
{
"epoch": 0.3232938276974376,
"grad_norm": 0.62109375,
"learning_rate": 6.82376811594203e-05,
"loss": 0.2721,
"step": 2230
},
{
"epoch": 0.3247435758037041,
"grad_norm": 0.609375,
"learning_rate": 6.817971014492755e-05,
"loss": 0.2818,
"step": 2240
},
{
"epoch": 0.32619332390997063,
"grad_norm": 0.66796875,
"learning_rate": 6.812173913043479e-05,
"loss": 0.2873,
"step": 2250
},
{
"epoch": 0.32764307201623716,
"grad_norm": 0.62109375,
"learning_rate": 6.806376811594203e-05,
"loss": 0.2668,
"step": 2260
},
{
"epoch": 0.32909282012250374,
"grad_norm": 0.765625,
"learning_rate": 6.800579710144928e-05,
"loss": 0.2907,
"step": 2270
},
{
"epoch": 0.33054256822877026,
"grad_norm": 0.6640625,
"learning_rate": 6.794782608695652e-05,
"loss": 0.286,
"step": 2280
},
{
"epoch": 0.3319923163350368,
"grad_norm": 0.66796875,
"learning_rate": 6.788985507246378e-05,
"loss": 0.2899,
"step": 2290
},
{
"epoch": 0.3334420644413033,
"grad_norm": 0.66015625,
"learning_rate": 6.783188405797102e-05,
"loss": 0.2792,
"step": 2300
},
{
"epoch": 0.33489181254756983,
"grad_norm": 0.5625,
"learning_rate": 6.777391304347827e-05,
"loss": 0.2763,
"step": 2310
},
{
"epoch": 0.3363415606538364,
"grad_norm": 0.63671875,
"learning_rate": 6.771594202898551e-05,
"loss": 0.2702,
"step": 2320
},
{
"epoch": 0.33779130876010294,
"grad_norm": 0.66796875,
"learning_rate": 6.765797101449276e-05,
"loss": 0.2978,
"step": 2330
},
{
"epoch": 0.33924105686636946,
"grad_norm": 0.58984375,
"learning_rate": 6.76e-05,
"loss": 0.2618,
"step": 2340
},
{
"epoch": 0.340690804972636,
"grad_norm": 0.5390625,
"learning_rate": 6.754202898550726e-05,
"loss": 0.2816,
"step": 2350
},
{
"epoch": 0.34214055307890257,
"grad_norm": 0.73828125,
"learning_rate": 6.74840579710145e-05,
"loss": 0.2761,
"step": 2360
},
{
"epoch": 0.3435903011851691,
"grad_norm": 0.55078125,
"learning_rate": 6.742608695652174e-05,
"loss": 0.2618,
"step": 2370
},
{
"epoch": 0.3450400492914356,
"grad_norm": 0.62109375,
"learning_rate": 6.736811594202899e-05,
"loss": 0.2769,
"step": 2380
},
{
"epoch": 0.34648979739770214,
"grad_norm": 0.6484375,
"learning_rate": 6.731014492753623e-05,
"loss": 0.2797,
"step": 2390
},
{
"epoch": 0.34793954550396866,
"grad_norm": 0.65625,
"learning_rate": 6.725217391304348e-05,
"loss": 0.2731,
"step": 2400
},
{
"epoch": 0.34938929361023524,
"grad_norm": 0.64453125,
"learning_rate": 6.719420289855074e-05,
"loss": 0.2711,
"step": 2410
},
{
"epoch": 0.35083904171650176,
"grad_norm": 0.8046875,
"learning_rate": 6.713623188405798e-05,
"loss": 0.2687,
"step": 2420
},
{
"epoch": 0.3522887898227683,
"grad_norm": 0.71484375,
"learning_rate": 6.707826086956522e-05,
"loss": 0.2965,
"step": 2430
},
{
"epoch": 0.3537385379290348,
"grad_norm": 0.5,
"learning_rate": 6.702028985507247e-05,
"loss": 0.2596,
"step": 2440
},
{
"epoch": 0.3551882860353014,
"grad_norm": 0.56640625,
"learning_rate": 6.696231884057971e-05,
"loss": 0.2896,
"step": 2450
},
{
"epoch": 0.3566380341415679,
"grad_norm": 0.6484375,
"learning_rate": 6.690434782608697e-05,
"loss": 0.261,
"step": 2460
},
{
"epoch": 0.35808778224783444,
"grad_norm": 0.65625,
"learning_rate": 6.68463768115942e-05,
"loss": 0.2846,
"step": 2470
},
{
"epoch": 0.35953753035410096,
"grad_norm": 0.66015625,
"learning_rate": 6.678840579710146e-05,
"loss": 0.2697,
"step": 2480
},
{
"epoch": 0.3609872784603675,
"grad_norm": 0.6484375,
"learning_rate": 6.67304347826087e-05,
"loss": 0.2742,
"step": 2490
},
{
"epoch": 0.36243702656663407,
"grad_norm": 0.65234375,
"learning_rate": 6.667246376811594e-05,
"loss": 0.2517,
"step": 2500
},
{
"epoch": 0.3638867746729006,
"grad_norm": 0.5234375,
"learning_rate": 6.661449275362319e-05,
"loss": 0.2671,
"step": 2510
},
{
"epoch": 0.3653365227791671,
"grad_norm": 0.65234375,
"learning_rate": 6.655652173913045e-05,
"loss": 0.2899,
"step": 2520
},
{
"epoch": 0.36678627088543364,
"grad_norm": 0.6015625,
"learning_rate": 6.649855072463769e-05,
"loss": 0.2864,
"step": 2530
},
{
"epoch": 0.36823601899170016,
"grad_norm": 0.5859375,
"learning_rate": 6.644057971014494e-05,
"loss": 0.2791,
"step": 2540
},
{
"epoch": 0.36968576709796674,
"grad_norm": 0.63671875,
"learning_rate": 6.638260869565218e-05,
"loss": 0.2804,
"step": 2550
},
{
"epoch": 0.37113551520423327,
"grad_norm": 0.6171875,
"learning_rate": 6.632463768115942e-05,
"loss": 0.2704,
"step": 2560
},
{
"epoch": 0.3725852633104998,
"grad_norm": 0.62109375,
"learning_rate": 6.626666666666667e-05,
"loss": 0.2603,
"step": 2570
},
{
"epoch": 0.3740350114167663,
"grad_norm": 0.5859375,
"learning_rate": 6.620869565217391e-05,
"loss": 0.2523,
"step": 2580
},
{
"epoch": 0.3754847595230329,
"grad_norm": 0.6484375,
"learning_rate": 6.615072463768117e-05,
"loss": 0.233,
"step": 2590
},
{
"epoch": 0.3769345076292994,
"grad_norm": 0.61328125,
"learning_rate": 6.609275362318842e-05,
"loss": 0.2582,
"step": 2600
},
{
"epoch": 0.37838425573556594,
"grad_norm": 0.6328125,
"learning_rate": 6.603478260869566e-05,
"loss": 0.2645,
"step": 2610
},
{
"epoch": 0.37983400384183247,
"grad_norm": 0.5625,
"learning_rate": 6.59768115942029e-05,
"loss": 0.2793,
"step": 2620
},
{
"epoch": 0.381283751948099,
"grad_norm": 0.51953125,
"learning_rate": 6.591884057971014e-05,
"loss": 0.2791,
"step": 2630
},
{
"epoch": 0.38273350005436557,
"grad_norm": 0.625,
"learning_rate": 6.58608695652174e-05,
"loss": 0.2648,
"step": 2640
},
{
"epoch": 0.3841832481606321,
"grad_norm": 0.59375,
"learning_rate": 6.580289855072465e-05,
"loss": 0.2711,
"step": 2650
},
{
"epoch": 0.3856329962668986,
"grad_norm": 0.5234375,
"learning_rate": 6.574492753623189e-05,
"loss": 0.2481,
"step": 2660
},
{
"epoch": 0.38708274437316514,
"grad_norm": 0.74609375,
"learning_rate": 6.568695652173914e-05,
"loss": 0.2791,
"step": 2670
},
{
"epoch": 0.3885324924794317,
"grad_norm": 0.62890625,
"learning_rate": 6.562898550724638e-05,
"loss": 0.2713,
"step": 2680
},
{
"epoch": 0.38998224058569825,
"grad_norm": 0.62109375,
"learning_rate": 6.557101449275362e-05,
"loss": 0.2781,
"step": 2690
},
{
"epoch": 0.39143198869196477,
"grad_norm": 0.640625,
"learning_rate": 6.551304347826088e-05,
"loss": 0.2546,
"step": 2700
},
{
"epoch": 0.3928817367982313,
"grad_norm": 0.671875,
"learning_rate": 6.545507246376813e-05,
"loss": 0.2618,
"step": 2710
},
{
"epoch": 0.3943314849044978,
"grad_norm": 0.59375,
"learning_rate": 6.539710144927537e-05,
"loss": 0.2537,
"step": 2720
},
{
"epoch": 0.3957812330107644,
"grad_norm": 0.56640625,
"learning_rate": 6.533913043478261e-05,
"loss": 0.2528,
"step": 2730
},
{
"epoch": 0.3972309811170309,
"grad_norm": 0.61328125,
"learning_rate": 6.528115942028986e-05,
"loss": 0.2544,
"step": 2740
},
{
"epoch": 0.39868072922329745,
"grad_norm": 0.625,
"learning_rate": 6.52231884057971e-05,
"loss": 0.2668,
"step": 2750
},
{
"epoch": 0.40013047732956397,
"grad_norm": 0.5390625,
"learning_rate": 6.516521739130436e-05,
"loss": 0.2607,
"step": 2760
},
{
"epoch": 0.40158022543583055,
"grad_norm": 0.73828125,
"learning_rate": 6.51072463768116e-05,
"loss": 0.2564,
"step": 2770
},
{
"epoch": 0.4030299735420971,
"grad_norm": 0.578125,
"learning_rate": 6.504927536231885e-05,
"loss": 0.2758,
"step": 2780
},
{
"epoch": 0.4044797216483636,
"grad_norm": 0.6015625,
"learning_rate": 6.499130434782609e-05,
"loss": 0.2584,
"step": 2790
},
{
"epoch": 0.4059294697546301,
"grad_norm": 0.546875,
"learning_rate": 6.493333333333333e-05,
"loss": 0.2494,
"step": 2800
},
{
"epoch": 0.40737921786089665,
"grad_norm": 0.5703125,
"learning_rate": 6.487536231884058e-05,
"loss": 0.2507,
"step": 2810
},
{
"epoch": 0.4088289659671632,
"grad_norm": 0.546875,
"learning_rate": 6.481739130434782e-05,
"loss": 0.2743,
"step": 2820
},
{
"epoch": 0.41027871407342975,
"grad_norm": 0.60546875,
"learning_rate": 6.475942028985508e-05,
"loss": 0.2454,
"step": 2830
},
{
"epoch": 0.4117284621796963,
"grad_norm": 0.494140625,
"learning_rate": 6.470144927536233e-05,
"loss": 0.2636,
"step": 2840
},
{
"epoch": 0.4131782102859628,
"grad_norm": 0.61328125,
"learning_rate": 6.464347826086957e-05,
"loss": 0.2498,
"step": 2850
},
{
"epoch": 0.4146279583922293,
"grad_norm": 0.578125,
"learning_rate": 6.458550724637681e-05,
"loss": 0.2395,
"step": 2860
},
{
"epoch": 0.4160777064984959,
"grad_norm": 0.60546875,
"learning_rate": 6.452753623188407e-05,
"loss": 0.2689,
"step": 2870
},
{
"epoch": 0.4175274546047624,
"grad_norm": 0.7109375,
"learning_rate": 6.44695652173913e-05,
"loss": 0.2662,
"step": 2880
},
{
"epoch": 0.41897720271102895,
"grad_norm": 0.59765625,
"learning_rate": 6.441159420289856e-05,
"loss": 0.2698,
"step": 2890
},
{
"epoch": 0.4204269508172955,
"grad_norm": 0.6015625,
"learning_rate": 6.43536231884058e-05,
"loss": 0.2465,
"step": 2900
},
{
"epoch": 0.42187669892356205,
"grad_norm": 0.72265625,
"learning_rate": 6.429565217391305e-05,
"loss": 0.2561,
"step": 2910
},
{
"epoch": 0.4233264470298286,
"grad_norm": 0.55078125,
"learning_rate": 6.423768115942029e-05,
"loss": 0.2564,
"step": 2920
},
{
"epoch": 0.4247761951360951,
"grad_norm": 0.640625,
"learning_rate": 6.417971014492753e-05,
"loss": 0.2568,
"step": 2930
},
{
"epoch": 0.4262259432423616,
"grad_norm": 0.4921875,
"learning_rate": 6.412173913043479e-05,
"loss": 0.2624,
"step": 2940
},
{
"epoch": 0.42767569134862815,
"grad_norm": 0.5234375,
"learning_rate": 6.406376811594204e-05,
"loss": 0.2602,
"step": 2950
},
{
"epoch": 0.42912543945489473,
"grad_norm": 0.58984375,
"learning_rate": 6.400579710144928e-05,
"loss": 0.233,
"step": 2960
},
{
"epoch": 0.43057518756116125,
"grad_norm": 0.62109375,
"learning_rate": 6.394782608695653e-05,
"loss": 0.2452,
"step": 2970
},
{
"epoch": 0.4320249356674278,
"grad_norm": 0.57421875,
"learning_rate": 6.388985507246377e-05,
"loss": 0.2501,
"step": 2980
},
{
"epoch": 0.4334746837736943,
"grad_norm": 0.453125,
"learning_rate": 6.383188405797101e-05,
"loss": 0.2431,
"step": 2990
},
{
"epoch": 0.4349244318799609,
"grad_norm": 0.55859375,
"learning_rate": 6.377391304347827e-05,
"loss": 0.2476,
"step": 3000
},
{
"epoch": 0.4363741799862274,
"grad_norm": 0.6171875,
"learning_rate": 6.371594202898551e-05,
"loss": 0.2598,
"step": 3010
},
{
"epoch": 0.43782392809249393,
"grad_norm": 0.6015625,
"learning_rate": 6.365797101449276e-05,
"loss": 0.2361,
"step": 3020
},
{
"epoch": 0.43927367619876045,
"grad_norm": 0.55859375,
"learning_rate": 6.360000000000001e-05,
"loss": 0.2299,
"step": 3030
},
{
"epoch": 0.440723424305027,
"grad_norm": 0.482421875,
"learning_rate": 6.354202898550725e-05,
"loss": 0.2437,
"step": 3040
},
{
"epoch": 0.44217317241129356,
"grad_norm": 0.51171875,
"learning_rate": 6.34840579710145e-05,
"loss": 0.245,
"step": 3050
},
{
"epoch": 0.4436229205175601,
"grad_norm": 0.578125,
"learning_rate": 6.342608695652175e-05,
"loss": 0.244,
"step": 3060
},
{
"epoch": 0.4450726686238266,
"grad_norm": 0.578125,
"learning_rate": 6.336811594202899e-05,
"loss": 0.255,
"step": 3070
},
{
"epoch": 0.44652241673009313,
"grad_norm": 0.54296875,
"learning_rate": 6.331014492753624e-05,
"loss": 0.2598,
"step": 3080
},
{
"epoch": 0.4479721648363597,
"grad_norm": 0.53125,
"learning_rate": 6.325217391304348e-05,
"loss": 0.2644,
"step": 3090
},
{
"epoch": 0.44942191294262623,
"grad_norm": 0.515625,
"learning_rate": 6.319420289855074e-05,
"loss": 0.2632,
"step": 3100
},
{
"epoch": 0.45087166104889276,
"grad_norm": 0.58203125,
"learning_rate": 6.313623188405798e-05,
"loss": 0.2403,
"step": 3110
},
{
"epoch": 0.4523214091551593,
"grad_norm": 0.5390625,
"learning_rate": 6.307826086956522e-05,
"loss": 0.2423,
"step": 3120
},
{
"epoch": 0.4537711572614258,
"grad_norm": 0.5703125,
"learning_rate": 6.302028985507247e-05,
"loss": 0.2459,
"step": 3130
},
{
"epoch": 0.4552209053676924,
"grad_norm": 0.5390625,
"learning_rate": 6.296231884057972e-05,
"loss": 0.2512,
"step": 3140
},
{
"epoch": 0.4566706534739589,
"grad_norm": 0.60546875,
"learning_rate": 6.290434782608696e-05,
"loss": 0.2365,
"step": 3150
},
{
"epoch": 0.45812040158022543,
"grad_norm": 0.53515625,
"learning_rate": 6.28463768115942e-05,
"loss": 0.2357,
"step": 3160
},
{
"epoch": 0.45957014968649196,
"grad_norm": 0.62890625,
"learning_rate": 6.278840579710146e-05,
"loss": 0.2432,
"step": 3170
},
{
"epoch": 0.4610198977927585,
"grad_norm": 0.5859375,
"learning_rate": 6.27304347826087e-05,
"loss": 0.2558,
"step": 3180
},
{
"epoch": 0.46246964589902506,
"grad_norm": 0.59765625,
"learning_rate": 6.267246376811595e-05,
"loss": 0.2411,
"step": 3190
},
{
"epoch": 0.4639193940052916,
"grad_norm": 0.5390625,
"learning_rate": 6.261449275362319e-05,
"loss": 0.2433,
"step": 3200
},
{
"epoch": 0.4653691421115581,
"grad_norm": 0.55859375,
"learning_rate": 6.255652173913044e-05,
"loss": 0.2503,
"step": 3210
},
{
"epoch": 0.46681889021782463,
"grad_norm": 0.67578125,
"learning_rate": 6.249855072463768e-05,
"loss": 0.2381,
"step": 3220
},
{
"epoch": 0.4682686383240912,
"grad_norm": 0.53125,
"learning_rate": 6.244057971014492e-05,
"loss": 0.2276,
"step": 3230
},
{
"epoch": 0.46971838643035774,
"grad_norm": 0.54296875,
"learning_rate": 6.238260869565218e-05,
"loss": 0.2273,
"step": 3240
},
{
"epoch": 0.47116813453662426,
"grad_norm": 0.490234375,
"learning_rate": 6.232463768115942e-05,
"loss": 0.2431,
"step": 3250
},
{
"epoch": 0.4726178826428908,
"grad_norm": 0.5546875,
"learning_rate": 6.226666666666667e-05,
"loss": 0.232,
"step": 3260
},
{
"epoch": 0.4740676307491573,
"grad_norm": 0.49609375,
"learning_rate": 6.220869565217392e-05,
"loss": 0.2572,
"step": 3270
},
{
"epoch": 0.4755173788554239,
"grad_norm": 0.54296875,
"learning_rate": 6.215072463768116e-05,
"loss": 0.2468,
"step": 3280
},
{
"epoch": 0.4769671269616904,
"grad_norm": 0.57421875,
"learning_rate": 6.20927536231884e-05,
"loss": 0.2344,
"step": 3290
},
{
"epoch": 0.47841687506795694,
"grad_norm": 0.58984375,
"learning_rate": 6.203478260869566e-05,
"loss": 0.2515,
"step": 3300
},
{
"epoch": 0.47986662317422346,
"grad_norm": 0.470703125,
"learning_rate": 6.19768115942029e-05,
"loss": 0.2252,
"step": 3310
},
{
"epoch": 0.48131637128049004,
"grad_norm": 0.640625,
"learning_rate": 6.191884057971015e-05,
"loss": 0.2335,
"step": 3320
},
{
"epoch": 0.48276611938675656,
"grad_norm": 0.75390625,
"learning_rate": 6.18608695652174e-05,
"loss": 0.237,
"step": 3330
},
{
"epoch": 0.4842158674930231,
"grad_norm": 0.5,
"learning_rate": 6.180289855072465e-05,
"loss": 0.249,
"step": 3340
},
{
"epoch": 0.4856656155992896,
"grad_norm": 0.61328125,
"learning_rate": 6.174492753623189e-05,
"loss": 0.2494,
"step": 3350
},
{
"epoch": 0.48711536370555614,
"grad_norm": 0.51953125,
"learning_rate": 6.168695652173913e-05,
"loss": 0.222,
"step": 3360
},
{
"epoch": 0.4885651118118227,
"grad_norm": 0.68359375,
"learning_rate": 6.162898550724638e-05,
"loss": 0.2546,
"step": 3370
},
{
"epoch": 0.49001485991808924,
"grad_norm": 0.5234375,
"learning_rate": 6.157101449275363e-05,
"loss": 0.2299,
"step": 3380
},
{
"epoch": 0.49146460802435576,
"grad_norm": 0.55859375,
"learning_rate": 6.151304347826087e-05,
"loss": 0.2391,
"step": 3390
},
{
"epoch": 0.4929143561306223,
"grad_norm": 0.59765625,
"learning_rate": 6.145507246376813e-05,
"loss": 0.2433,
"step": 3400
},
{
"epoch": 0.49436410423688887,
"grad_norm": 0.6640625,
"learning_rate": 6.139710144927537e-05,
"loss": 0.2319,
"step": 3410
},
{
"epoch": 0.4958138523431554,
"grad_norm": 0.431640625,
"learning_rate": 6.133913043478261e-05,
"loss": 0.2208,
"step": 3420
},
{
"epoch": 0.4972636004494219,
"grad_norm": 0.447265625,
"learning_rate": 6.128115942028986e-05,
"loss": 0.2332,
"step": 3430
},
{
"epoch": 0.49871334855568844,
"grad_norm": 0.51171875,
"learning_rate": 6.12231884057971e-05,
"loss": 0.2456,
"step": 3440
},
{
"epoch": 0.500163096661955,
"grad_norm": 0.51953125,
"learning_rate": 6.116521739130435e-05,
"loss": 0.2422,
"step": 3450
},
{
"epoch": 0.5016128447682215,
"grad_norm": 0.55078125,
"learning_rate": 6.110724637681161e-05,
"loss": 0.221,
"step": 3460
},
{
"epoch": 0.5030625928744881,
"grad_norm": 0.53515625,
"learning_rate": 6.104927536231885e-05,
"loss": 0.2392,
"step": 3470
},
{
"epoch": 0.5045123409807546,
"grad_norm": 0.61328125,
"learning_rate": 6.0991304347826095e-05,
"loss": 0.2368,
"step": 3480
},
{
"epoch": 0.5059620890870211,
"grad_norm": 0.578125,
"learning_rate": 6.093333333333334e-05,
"loss": 0.2317,
"step": 3490
},
{
"epoch": 0.5074118371932876,
"grad_norm": 0.48828125,
"learning_rate": 6.087536231884058e-05,
"loss": 0.2127,
"step": 3500
},
{
"epoch": 0.5088615852995542,
"grad_norm": 0.55078125,
"learning_rate": 6.0817391304347835e-05,
"loss": 0.2418,
"step": 3510
},
{
"epoch": 0.5103113334058207,
"grad_norm": 0.447265625,
"learning_rate": 6.0759420289855075e-05,
"loss": 0.2289,
"step": 3520
},
{
"epoch": 0.5117610815120873,
"grad_norm": 0.609375,
"learning_rate": 6.070144927536232e-05,
"loss": 0.2398,
"step": 3530
},
{
"epoch": 0.5132108296183538,
"grad_norm": 0.5546875,
"learning_rate": 6.0643478260869576e-05,
"loss": 0.2452,
"step": 3540
},
{
"epoch": 0.5146605777246204,
"grad_norm": 0.61328125,
"learning_rate": 6.0585507246376816e-05,
"loss": 0.2351,
"step": 3550
},
{
"epoch": 0.5161103258308869,
"grad_norm": 0.46875,
"learning_rate": 6.052753623188406e-05,
"loss": 0.2268,
"step": 3560
},
{
"epoch": 0.5175600739371534,
"grad_norm": 0.5390625,
"learning_rate": 6.046956521739131e-05,
"loss": 0.2385,
"step": 3570
},
{
"epoch": 0.5190098220434199,
"grad_norm": 0.54296875,
"learning_rate": 6.0411594202898556e-05,
"loss": 0.231,
"step": 3580
},
{
"epoch": 0.5204595701496865,
"grad_norm": 0.498046875,
"learning_rate": 6.03536231884058e-05,
"loss": 0.2293,
"step": 3590
},
{
"epoch": 0.521909318255953,
"grad_norm": 0.56640625,
"learning_rate": 6.029565217391304e-05,
"loss": 0.2329,
"step": 3600
},
{
"epoch": 0.5233590663622195,
"grad_norm": 0.4921875,
"learning_rate": 6.02376811594203e-05,
"loss": 0.2312,
"step": 3610
},
{
"epoch": 0.5248088144684862,
"grad_norm": 0.49609375,
"learning_rate": 6.0179710144927544e-05,
"loss": 0.2372,
"step": 3620
},
{
"epoch": 0.5262585625747527,
"grad_norm": 0.59765625,
"learning_rate": 6.0121739130434784e-05,
"loss": 0.2384,
"step": 3630
},
{
"epoch": 0.5277083106810192,
"grad_norm": 0.51953125,
"learning_rate": 6.006376811594203e-05,
"loss": 0.2263,
"step": 3640
},
{
"epoch": 0.5291580587872857,
"grad_norm": 0.5234375,
"learning_rate": 6.0005797101449284e-05,
"loss": 0.2296,
"step": 3650
},
{
"epoch": 0.5306078068935522,
"grad_norm": 0.4375,
"learning_rate": 5.9947826086956524e-05,
"loss": 0.2272,
"step": 3660
},
{
"epoch": 0.5320575549998188,
"grad_norm": 0.5546875,
"learning_rate": 5.988985507246377e-05,
"loss": 0.2432,
"step": 3670
},
{
"epoch": 0.5335073031060853,
"grad_norm": 0.56640625,
"learning_rate": 5.9831884057971025e-05,
"loss": 0.2289,
"step": 3680
},
{
"epoch": 0.5349570512123518,
"grad_norm": 0.458984375,
"learning_rate": 5.9773913043478265e-05,
"loss": 0.2399,
"step": 3690
},
{
"epoch": 0.5364067993186183,
"grad_norm": 0.5390625,
"learning_rate": 5.971594202898551e-05,
"loss": 0.23,
"step": 3700
},
{
"epoch": 0.537856547424885,
"grad_norm": 0.6796875,
"learning_rate": 5.965797101449275e-05,
"loss": 0.2177,
"step": 3710
},
{
"epoch": 0.5393062955311515,
"grad_norm": 0.6640625,
"learning_rate": 5.9600000000000005e-05,
"loss": 0.259,
"step": 3720
},
{
"epoch": 0.540756043637418,
"grad_norm": 0.5234375,
"learning_rate": 5.954202898550725e-05,
"loss": 0.2158,
"step": 3730
},
{
"epoch": 0.5422057917436846,
"grad_norm": 0.5234375,
"learning_rate": 5.948405797101449e-05,
"loss": 0.2186,
"step": 3740
},
{
"epoch": 0.5436555398499511,
"grad_norm": 0.48828125,
"learning_rate": 5.9426086956521746e-05,
"loss": 0.2221,
"step": 3750
},
{
"epoch": 0.5451052879562176,
"grad_norm": 0.498046875,
"learning_rate": 5.936811594202899e-05,
"loss": 0.2439,
"step": 3760
},
{
"epoch": 0.5465550360624841,
"grad_norm": 0.58984375,
"learning_rate": 5.931014492753623e-05,
"loss": 0.2281,
"step": 3770
},
{
"epoch": 0.5480047841687506,
"grad_norm": 0.54296875,
"learning_rate": 5.9252173913043486e-05,
"loss": 0.2281,
"step": 3780
},
{
"epoch": 0.5494545322750172,
"grad_norm": 0.62109375,
"learning_rate": 5.9194202898550726e-05,
"loss": 0.2292,
"step": 3790
},
{
"epoch": 0.5509042803812838,
"grad_norm": 0.6171875,
"learning_rate": 5.913623188405797e-05,
"loss": 0.2315,
"step": 3800
},
{
"epoch": 0.5523540284875503,
"grad_norm": 0.64453125,
"learning_rate": 5.907826086956523e-05,
"loss": 0.2241,
"step": 3810
},
{
"epoch": 0.5538037765938169,
"grad_norm": 0.427734375,
"learning_rate": 5.902028985507247e-05,
"loss": 0.2082,
"step": 3820
},
{
"epoch": 0.5552535247000834,
"grad_norm": 0.6015625,
"learning_rate": 5.8962318840579714e-05,
"loss": 0.229,
"step": 3830
},
{
"epoch": 0.5567032728063499,
"grad_norm": 0.5390625,
"learning_rate": 5.890434782608697e-05,
"loss": 0.2166,
"step": 3840
},
{
"epoch": 0.5581530209126164,
"grad_norm": 0.57421875,
"learning_rate": 5.884637681159421e-05,
"loss": 0.2261,
"step": 3850
},
{
"epoch": 0.559602769018883,
"grad_norm": 0.5390625,
"learning_rate": 5.8788405797101454e-05,
"loss": 0.207,
"step": 3860
},
{
"epoch": 0.5610525171251495,
"grad_norm": 0.466796875,
"learning_rate": 5.8730434782608694e-05,
"loss": 0.2172,
"step": 3870
},
{
"epoch": 0.562502265231416,
"grad_norm": 0.5546875,
"learning_rate": 5.867246376811595e-05,
"loss": 0.2298,
"step": 3880
},
{
"epoch": 0.5639520133376825,
"grad_norm": 0.435546875,
"learning_rate": 5.8614492753623195e-05,
"loss": 0.2342,
"step": 3890
},
{
"epoch": 0.5654017614439492,
"grad_norm": 0.494140625,
"learning_rate": 5.8556521739130435e-05,
"loss": 0.2461,
"step": 3900
},
{
"epoch": 0.5668515095502157,
"grad_norm": 0.486328125,
"learning_rate": 5.849855072463769e-05,
"loss": 0.2266,
"step": 3910
},
{
"epoch": 0.5683012576564822,
"grad_norm": 0.59375,
"learning_rate": 5.8440579710144935e-05,
"loss": 0.233,
"step": 3920
},
{
"epoch": 0.5697510057627487,
"grad_norm": 0.478515625,
"learning_rate": 5.8382608695652175e-05,
"loss": 0.2371,
"step": 3930
},
{
"epoch": 0.5712007538690153,
"grad_norm": 0.65625,
"learning_rate": 5.832463768115943e-05,
"loss": 0.2359,
"step": 3940
},
{
"epoch": 0.5726505019752818,
"grad_norm": 0.609375,
"learning_rate": 5.8266666666666676e-05,
"loss": 0.2124,
"step": 3950
},
{
"epoch": 0.5741002500815483,
"grad_norm": 0.439453125,
"learning_rate": 5.8208695652173916e-05,
"loss": 0.2367,
"step": 3960
},
{
"epoch": 0.5755499981878148,
"grad_norm": 0.462890625,
"learning_rate": 5.815072463768117e-05,
"loss": 0.2273,
"step": 3970
},
{
"epoch": 0.5769997462940814,
"grad_norm": 0.52734375,
"learning_rate": 5.809275362318841e-05,
"loss": 0.2222,
"step": 3980
},
{
"epoch": 0.578449494400348,
"grad_norm": 0.58203125,
"learning_rate": 5.8034782608695656e-05,
"loss": 0.2161,
"step": 3990
},
{
"epoch": 0.5798992425066145,
"grad_norm": 0.54296875,
"learning_rate": 5.79768115942029e-05,
"loss": 0.2413,
"step": 4000
},
{
"epoch": 0.581348990612881,
"grad_norm": 0.47265625,
"learning_rate": 5.791884057971015e-05,
"loss": 0.2159,
"step": 4010
},
{
"epoch": 0.5827987387191476,
"grad_norm": 0.4765625,
"learning_rate": 5.78608695652174e-05,
"loss": 0.2028,
"step": 4020
},
{
"epoch": 0.5842484868254141,
"grad_norm": 0.55859375,
"learning_rate": 5.7802898550724644e-05,
"loss": 0.2264,
"step": 4030
},
{
"epoch": 0.5856982349316806,
"grad_norm": 0.5078125,
"learning_rate": 5.774492753623189e-05,
"loss": 0.2126,
"step": 4040
},
{
"epoch": 0.5871479830379471,
"grad_norm": 0.439453125,
"learning_rate": 5.768695652173914e-05,
"loss": 0.2263,
"step": 4050
},
{
"epoch": 0.5885977311442137,
"grad_norm": 0.4765625,
"learning_rate": 5.762898550724638e-05,
"loss": 0.2152,
"step": 4060
},
{
"epoch": 0.5900474792504802,
"grad_norm": 0.55859375,
"learning_rate": 5.7571014492753624e-05,
"loss": 0.2294,
"step": 4070
},
{
"epoch": 0.5914972273567468,
"grad_norm": 0.44921875,
"learning_rate": 5.751304347826088e-05,
"loss": 0.1966,
"step": 4080
},
{
"epoch": 0.5929469754630133,
"grad_norm": 0.5859375,
"learning_rate": 5.745507246376812e-05,
"loss": 0.2252,
"step": 4090
},
{
"epoch": 0.5943967235692799,
"grad_norm": 0.4609375,
"learning_rate": 5.7397101449275365e-05,
"loss": 0.2154,
"step": 4100
},
{
"epoch": 0.5958464716755464,
"grad_norm": 0.41015625,
"learning_rate": 5.733913043478262e-05,
"loss": 0.2171,
"step": 4110
},
{
"epoch": 0.5972962197818129,
"grad_norm": 0.49609375,
"learning_rate": 5.728115942028986e-05,
"loss": 0.2163,
"step": 4120
},
{
"epoch": 0.5987459678880794,
"grad_norm": 0.439453125,
"learning_rate": 5.7223188405797105e-05,
"loss": 0.219,
"step": 4130
},
{
"epoch": 0.600195715994346,
"grad_norm": 0.51171875,
"learning_rate": 5.7165217391304345e-05,
"loss": 0.2188,
"step": 4140
},
{
"epoch": 0.6016454641006125,
"grad_norm": 0.57421875,
"learning_rate": 5.71072463768116e-05,
"loss": 0.2345,
"step": 4150
},
{
"epoch": 0.603095212206879,
"grad_norm": 0.5234375,
"learning_rate": 5.7049275362318846e-05,
"loss": 0.2327,
"step": 4160
},
{
"epoch": 0.6045449603131456,
"grad_norm": 0.5703125,
"learning_rate": 5.6991304347826086e-05,
"loss": 0.2293,
"step": 4170
},
{
"epoch": 0.6059947084194122,
"grad_norm": 0.498046875,
"learning_rate": 5.693333333333334e-05,
"loss": 0.2303,
"step": 4180
},
{
"epoch": 0.6074444565256787,
"grad_norm": 0.515625,
"learning_rate": 5.6875362318840586e-05,
"loss": 0.2136,
"step": 4190
},
{
"epoch": 0.6088942046319452,
"grad_norm": 0.5390625,
"learning_rate": 5.6817391304347826e-05,
"loss": 0.2186,
"step": 4200
},
{
"epoch": 0.6103439527382117,
"grad_norm": 0.5546875,
"learning_rate": 5.675942028985508e-05,
"loss": 0.2201,
"step": 4210
},
{
"epoch": 0.6117937008444783,
"grad_norm": 0.490234375,
"learning_rate": 5.670144927536232e-05,
"loss": 0.2185,
"step": 4220
},
{
"epoch": 0.6132434489507448,
"grad_norm": 0.50390625,
"learning_rate": 5.664347826086957e-05,
"loss": 0.2305,
"step": 4230
},
{
"epoch": 0.6146931970570113,
"grad_norm": 0.484375,
"learning_rate": 5.658550724637682e-05,
"loss": 0.2191,
"step": 4240
},
{
"epoch": 0.6161429451632778,
"grad_norm": 0.494140625,
"learning_rate": 5.652753623188406e-05,
"loss": 0.2035,
"step": 4250
},
{
"epoch": 0.6175926932695445,
"grad_norm": 0.609375,
"learning_rate": 5.646956521739131e-05,
"loss": 0.2247,
"step": 4260
},
{
"epoch": 0.619042441375811,
"grad_norm": 0.47265625,
"learning_rate": 5.641159420289856e-05,
"loss": 0.221,
"step": 4270
},
{
"epoch": 0.6204921894820775,
"grad_norm": 0.6015625,
"learning_rate": 5.63536231884058e-05,
"loss": 0.2387,
"step": 4280
},
{
"epoch": 0.621941937588344,
"grad_norm": 0.50390625,
"learning_rate": 5.629565217391305e-05,
"loss": 0.1979,
"step": 4290
},
{
"epoch": 0.6233916856946106,
"grad_norm": 0.478515625,
"learning_rate": 5.62376811594203e-05,
"loss": 0.2299,
"step": 4300
},
{
"epoch": 0.6248414338008771,
"grad_norm": 0.58203125,
"learning_rate": 5.617971014492754e-05,
"loss": 0.2211,
"step": 4310
},
{
"epoch": 0.6262911819071436,
"grad_norm": 0.5,
"learning_rate": 5.612173913043479e-05,
"loss": 0.199,
"step": 4320
},
{
"epoch": 0.6277409300134101,
"grad_norm": 0.43359375,
"learning_rate": 5.606376811594203e-05,
"loss": 0.2088,
"step": 4330
},
{
"epoch": 0.6291906781196767,
"grad_norm": 0.5078125,
"learning_rate": 5.600579710144928e-05,
"loss": 0.209,
"step": 4340
},
{
"epoch": 0.6306404262259433,
"grad_norm": 0.59765625,
"learning_rate": 5.594782608695653e-05,
"loss": 0.2156,
"step": 4350
},
{
"epoch": 0.6320901743322098,
"grad_norm": 0.5078125,
"learning_rate": 5.588985507246377e-05,
"loss": 0.2151,
"step": 4360
},
{
"epoch": 0.6335399224384763,
"grad_norm": 0.57421875,
"learning_rate": 5.583188405797102e-05,
"loss": 0.2191,
"step": 4370
},
{
"epoch": 0.6349896705447429,
"grad_norm": 0.55859375,
"learning_rate": 5.577391304347827e-05,
"loss": 0.239,
"step": 4380
},
{
"epoch": 0.6364394186510094,
"grad_norm": 0.65234375,
"learning_rate": 5.571594202898551e-05,
"loss": 0.1998,
"step": 4390
},
{
"epoch": 0.6378891667572759,
"grad_norm": 0.447265625,
"learning_rate": 5.565797101449276e-05,
"loss": 0.2214,
"step": 4400
},
{
"epoch": 0.6393389148635424,
"grad_norm": 0.5625,
"learning_rate": 5.56e-05,
"loss": 0.2256,
"step": 4410
},
{
"epoch": 0.640788662969809,
"grad_norm": 0.59375,
"learning_rate": 5.554202898550725e-05,
"loss": 0.2224,
"step": 4420
},
{
"epoch": 0.6422384110760755,
"grad_norm": 0.51953125,
"learning_rate": 5.54840579710145e-05,
"loss": 0.2107,
"step": 4430
},
{
"epoch": 0.6436881591823421,
"grad_norm": 0.52734375,
"learning_rate": 5.5426086956521743e-05,
"loss": 0.2282,
"step": 4440
},
{
"epoch": 0.6451379072886086,
"grad_norm": 0.59375,
"learning_rate": 5.536811594202899e-05,
"loss": 0.2239,
"step": 4450
},
{
"epoch": 0.6465876553948752,
"grad_norm": 0.478515625,
"learning_rate": 5.531014492753624e-05,
"loss": 0.2282,
"step": 4460
},
{
"epoch": 0.6480374035011417,
"grad_norm": 0.5,
"learning_rate": 5.5252173913043484e-05,
"loss": 0.2142,
"step": 4470
},
{
"epoch": 0.6494871516074082,
"grad_norm": 0.53125,
"learning_rate": 5.519420289855073e-05,
"loss": 0.2231,
"step": 4480
},
{
"epoch": 0.6509368997136747,
"grad_norm": 0.42578125,
"learning_rate": 5.513623188405797e-05,
"loss": 0.2153,
"step": 4490
},
{
"epoch": 0.6523866478199413,
"grad_norm": 0.51953125,
"learning_rate": 5.507826086956522e-05,
"loss": 0.2235,
"step": 4500
},
{
"epoch": 0.6538363959262078,
"grad_norm": 0.5390625,
"learning_rate": 5.502028985507247e-05,
"loss": 0.243,
"step": 4510
},
{
"epoch": 0.6552861440324743,
"grad_norm": 0.53125,
"learning_rate": 5.496231884057971e-05,
"loss": 0.2324,
"step": 4520
},
{
"epoch": 0.6567358921387408,
"grad_norm": 0.56640625,
"learning_rate": 5.490434782608696e-05,
"loss": 0.2294,
"step": 4530
},
{
"epoch": 0.6581856402450075,
"grad_norm": 0.498046875,
"learning_rate": 5.484637681159421e-05,
"loss": 0.2182,
"step": 4540
},
{
"epoch": 0.659635388351274,
"grad_norm": 0.515625,
"learning_rate": 5.478840579710145e-05,
"loss": 0.2158,
"step": 4550
},
{
"epoch": 0.6610851364575405,
"grad_norm": 0.431640625,
"learning_rate": 5.47304347826087e-05,
"loss": 0.2172,
"step": 4560
},
{
"epoch": 0.662534884563807,
"grad_norm": 0.462890625,
"learning_rate": 5.467246376811595e-05,
"loss": 0.2153,
"step": 4570
},
{
"epoch": 0.6639846326700736,
"grad_norm": 0.50390625,
"learning_rate": 5.461449275362319e-05,
"loss": 0.2123,
"step": 4580
},
{
"epoch": 0.6654343807763401,
"grad_norm": 0.447265625,
"learning_rate": 5.455652173913044e-05,
"loss": 0.2005,
"step": 4590
},
{
"epoch": 0.6668841288826066,
"grad_norm": 0.482421875,
"learning_rate": 5.449855072463768e-05,
"loss": 0.2255,
"step": 4600
},
{
"epoch": 0.6683338769888731,
"grad_norm": 0.5234375,
"learning_rate": 5.444057971014493e-05,
"loss": 0.2066,
"step": 4610
},
{
"epoch": 0.6697836250951397,
"grad_norm": 0.50390625,
"learning_rate": 5.438260869565218e-05,
"loss": 0.201,
"step": 4620
},
{
"epoch": 0.6712333732014063,
"grad_norm": 0.421875,
"learning_rate": 5.432463768115942e-05,
"loss": 0.2087,
"step": 4630
},
{
"epoch": 0.6726831213076728,
"grad_norm": 0.50390625,
"learning_rate": 5.426666666666667e-05,
"loss": 0.213,
"step": 4640
},
{
"epoch": 0.6741328694139394,
"grad_norm": 0.51171875,
"learning_rate": 5.420869565217392e-05,
"loss": 0.2118,
"step": 4650
},
{
"epoch": 0.6755826175202059,
"grad_norm": 0.4453125,
"learning_rate": 5.415072463768116e-05,
"loss": 0.2147,
"step": 4660
},
{
"epoch": 0.6770323656264724,
"grad_norm": 0.466796875,
"learning_rate": 5.4092753623188414e-05,
"loss": 0.2102,
"step": 4670
},
{
"epoch": 0.6784821137327389,
"grad_norm": 0.53515625,
"learning_rate": 5.4034782608695654e-05,
"loss": 0.2059,
"step": 4680
},
{
"epoch": 0.6799318618390054,
"grad_norm": 0.5390625,
"learning_rate": 5.39768115942029e-05,
"loss": 0.2095,
"step": 4690
},
{
"epoch": 0.681381609945272,
"grad_norm": 0.55078125,
"learning_rate": 5.3918840579710154e-05,
"loss": 0.2126,
"step": 4700
},
{
"epoch": 0.6828313580515385,
"grad_norm": 0.484375,
"learning_rate": 5.3860869565217394e-05,
"loss": 0.231,
"step": 4710
},
{
"epoch": 0.6842811061578051,
"grad_norm": 0.4765625,
"learning_rate": 5.380289855072464e-05,
"loss": 0.2029,
"step": 4720
},
{
"epoch": 0.6857308542640717,
"grad_norm": 0.4921875,
"learning_rate": 5.3744927536231895e-05,
"loss": 0.2193,
"step": 4730
},
{
"epoch": 0.6871806023703382,
"grad_norm": 0.466796875,
"learning_rate": 5.3686956521739135e-05,
"loss": 0.2097,
"step": 4740
},
{
"epoch": 0.6886303504766047,
"grad_norm": 0.5078125,
"learning_rate": 5.362898550724638e-05,
"loss": 0.2129,
"step": 4750
},
{
"epoch": 0.6900800985828712,
"grad_norm": 0.5234375,
"learning_rate": 5.357101449275362e-05,
"loss": 0.2292,
"step": 4760
},
{
"epoch": 0.6915298466891378,
"grad_norm": 0.458984375,
"learning_rate": 5.3513043478260875e-05,
"loss": 0.2149,
"step": 4770
},
{
"epoch": 0.6929795947954043,
"grad_norm": 0.55859375,
"learning_rate": 5.345507246376812e-05,
"loss": 0.2251,
"step": 4780
},
{
"epoch": 0.6944293429016708,
"grad_norm": 0.48046875,
"learning_rate": 5.339710144927536e-05,
"loss": 0.2174,
"step": 4790
},
{
"epoch": 0.6958790910079373,
"grad_norm": 0.494140625,
"learning_rate": 5.3339130434782616e-05,
"loss": 0.2145,
"step": 4800
},
{
"epoch": 0.697328839114204,
"grad_norm": 0.55078125,
"learning_rate": 5.328115942028986e-05,
"loss": 0.2216,
"step": 4810
},
{
"epoch": 0.6987785872204705,
"grad_norm": 0.59375,
"learning_rate": 5.32231884057971e-05,
"loss": 0.2123,
"step": 4820
},
{
"epoch": 0.700228335326737,
"grad_norm": 0.609375,
"learning_rate": 5.3165217391304356e-05,
"loss": 0.2101,
"step": 4830
},
{
"epoch": 0.7016780834330035,
"grad_norm": 0.54296875,
"learning_rate": 5.3107246376811597e-05,
"loss": 0.219,
"step": 4840
},
{
"epoch": 0.70312783153927,
"grad_norm": 0.51953125,
"learning_rate": 5.304927536231884e-05,
"loss": 0.2067,
"step": 4850
},
{
"epoch": 0.7045775796455366,
"grad_norm": 0.486328125,
"learning_rate": 5.299130434782609e-05,
"loss": 0.2161,
"step": 4860
},
{
"epoch": 0.7060273277518031,
"grad_norm": 0.5078125,
"learning_rate": 5.293333333333334e-05,
"loss": 0.2051,
"step": 4870
},
{
"epoch": 0.7074770758580696,
"grad_norm": 0.51171875,
"learning_rate": 5.2875362318840584e-05,
"loss": 0.2306,
"step": 4880
},
{
"epoch": 0.7089268239643362,
"grad_norm": 0.51171875,
"learning_rate": 5.281739130434783e-05,
"loss": 0.2055,
"step": 4890
},
{
"epoch": 0.7103765720706028,
"grad_norm": 0.4765625,
"learning_rate": 5.275942028985508e-05,
"loss": 0.2137,
"step": 4900
},
{
"epoch": 0.7118263201768693,
"grad_norm": 0.5390625,
"learning_rate": 5.2701449275362324e-05,
"loss": 0.2165,
"step": 4910
},
{
"epoch": 0.7132760682831358,
"grad_norm": 0.51953125,
"learning_rate": 5.264347826086957e-05,
"loss": 0.2223,
"step": 4920
},
{
"epoch": 0.7147258163894024,
"grad_norm": 0.44921875,
"learning_rate": 5.258550724637681e-05,
"loss": 0.1967,
"step": 4930
},
{
"epoch": 0.7161755644956689,
"grad_norm": 0.494140625,
"learning_rate": 5.2527536231884065e-05,
"loss": 0.21,
"step": 4940
},
{
"epoch": 0.7176253126019354,
"grad_norm": 0.494140625,
"learning_rate": 5.2469565217391305e-05,
"loss": 0.2063,
"step": 4950
},
{
"epoch": 0.7190750607082019,
"grad_norm": 0.5390625,
"learning_rate": 5.241159420289855e-05,
"loss": 0.221,
"step": 4960
},
{
"epoch": 0.7205248088144685,
"grad_norm": 0.4453125,
"learning_rate": 5.2353623188405805e-05,
"loss": 0.1838,
"step": 4970
},
{
"epoch": 0.721974556920735,
"grad_norm": 0.494140625,
"learning_rate": 5.2295652173913045e-05,
"loss": 0.196,
"step": 4980
},
{
"epoch": 0.7234243050270016,
"grad_norm": 0.53515625,
"learning_rate": 5.223768115942029e-05,
"loss": 0.2116,
"step": 4990
},
{
"epoch": 0.7248740531332681,
"grad_norm": 0.5546875,
"learning_rate": 5.2179710144927546e-05,
"loss": 0.2088,
"step": 5000
},
{
"epoch": 0.7263238012395347,
"grad_norm": 0.51171875,
"learning_rate": 5.2121739130434786e-05,
"loss": 0.214,
"step": 5010
},
{
"epoch": 0.7277735493458012,
"grad_norm": 0.4375,
"learning_rate": 5.206376811594203e-05,
"loss": 0.2118,
"step": 5020
},
{
"epoch": 0.7292232974520677,
"grad_norm": 0.494140625,
"learning_rate": 5.200579710144927e-05,
"loss": 0.2164,
"step": 5030
},
{
"epoch": 0.7306730455583342,
"grad_norm": 0.416015625,
"learning_rate": 5.1947826086956526e-05,
"loss": 0.2058,
"step": 5040
},
{
"epoch": 0.7321227936646008,
"grad_norm": 0.408203125,
"learning_rate": 5.188985507246377e-05,
"loss": 0.2079,
"step": 5050
},
{
"epoch": 0.7335725417708673,
"grad_norm": 0.36328125,
"learning_rate": 5.183188405797101e-05,
"loss": 0.1906,
"step": 5060
},
{
"epoch": 0.7350222898771338,
"grad_norm": 0.486328125,
"learning_rate": 5.177391304347827e-05,
"loss": 0.2137,
"step": 5070
},
{
"epoch": 0.7364720379834003,
"grad_norm": 0.44921875,
"learning_rate": 5.1715942028985514e-05,
"loss": 0.2153,
"step": 5080
},
{
"epoch": 0.737921786089667,
"grad_norm": 0.5703125,
"learning_rate": 5.1657971014492754e-05,
"loss": 0.2096,
"step": 5090
},
{
"epoch": 0.7393715341959335,
"grad_norm": 0.486328125,
"learning_rate": 5.160000000000001e-05,
"loss": 0.2183,
"step": 5100
},
{
"epoch": 0.7408212823022,
"grad_norm": 0.515625,
"learning_rate": 5.154202898550725e-05,
"loss": 0.2119,
"step": 5110
},
{
"epoch": 0.7422710304084665,
"grad_norm": 0.50390625,
"learning_rate": 5.1484057971014494e-05,
"loss": 0.1986,
"step": 5120
},
{
"epoch": 0.7437207785147331,
"grad_norm": 0.5,
"learning_rate": 5.142608695652175e-05,
"loss": 0.2108,
"step": 5130
},
{
"epoch": 0.7451705266209996,
"grad_norm": 0.55078125,
"learning_rate": 5.136811594202899e-05,
"loss": 0.1904,
"step": 5140
},
{
"epoch": 0.7466202747272661,
"grad_norm": 0.44140625,
"learning_rate": 5.1310144927536235e-05,
"loss": 0.2033,
"step": 5150
},
{
"epoch": 0.7480700228335326,
"grad_norm": 0.59375,
"learning_rate": 5.125217391304349e-05,
"loss": 0.1942,
"step": 5160
},
{
"epoch": 0.7495197709397992,
"grad_norm": 0.498046875,
"learning_rate": 5.119420289855073e-05,
"loss": 0.197,
"step": 5170
},
{
"epoch": 0.7509695190460658,
"grad_norm": 0.453125,
"learning_rate": 5.1136231884057975e-05,
"loss": 0.2244,
"step": 5180
},
{
"epoch": 0.7524192671523323,
"grad_norm": 0.56640625,
"learning_rate": 5.107826086956523e-05,
"loss": 0.2311,
"step": 5190
},
{
"epoch": 0.7538690152585988,
"grad_norm": 0.40625,
"learning_rate": 5.102028985507247e-05,
"loss": 0.209,
"step": 5200
},
{
"epoch": 0.7553187633648654,
"grad_norm": 0.58984375,
"learning_rate": 5.0962318840579716e-05,
"loss": 0.2031,
"step": 5210
},
{
"epoch": 0.7567685114711319,
"grad_norm": 0.482421875,
"learning_rate": 5.0904347826086956e-05,
"loss": 0.1892,
"step": 5220
},
{
"epoch": 0.7582182595773984,
"grad_norm": 0.48046875,
"learning_rate": 5.084637681159421e-05,
"loss": 0.2042,
"step": 5230
},
{
"epoch": 0.7596680076836649,
"grad_norm": 0.470703125,
"learning_rate": 5.0788405797101456e-05,
"loss": 0.1933,
"step": 5240
},
{
"epoch": 0.7611177557899315,
"grad_norm": 0.4375,
"learning_rate": 5.0730434782608696e-05,
"loss": 0.1892,
"step": 5250
},
{
"epoch": 0.762567503896198,
"grad_norm": 0.546875,
"learning_rate": 5.067246376811595e-05,
"loss": 0.201,
"step": 5260
},
{
"epoch": 0.7640172520024646,
"grad_norm": 0.455078125,
"learning_rate": 5.06144927536232e-05,
"loss": 0.1994,
"step": 5270
},
{
"epoch": 0.7654670001087311,
"grad_norm": 0.515625,
"learning_rate": 5.055652173913044e-05,
"loss": 0.1927,
"step": 5280
},
{
"epoch": 0.7669167482149977,
"grad_norm": 0.5,
"learning_rate": 5.049855072463769e-05,
"loss": 0.2018,
"step": 5290
},
{
"epoch": 0.7683664963212642,
"grad_norm": 0.5078125,
"learning_rate": 5.044057971014493e-05,
"loss": 0.2182,
"step": 5300
},
{
"epoch": 0.7698162444275307,
"grad_norm": 0.462890625,
"learning_rate": 5.038260869565218e-05,
"loss": 0.2086,
"step": 5310
},
{
"epoch": 0.7712659925337972,
"grad_norm": 0.44921875,
"learning_rate": 5.0324637681159424e-05,
"loss": 0.2077,
"step": 5320
},
{
"epoch": 0.7727157406400638,
"grad_norm": 0.478515625,
"learning_rate": 5.026666666666667e-05,
"loss": 0.202,
"step": 5330
},
{
"epoch": 0.7741654887463303,
"grad_norm": 0.4375,
"learning_rate": 5.020869565217392e-05,
"loss": 0.2135,
"step": 5340
},
{
"epoch": 0.7756152368525968,
"grad_norm": 0.52734375,
"learning_rate": 5.0150724637681165e-05,
"loss": 0.2098,
"step": 5350
},
{
"epoch": 0.7770649849588634,
"grad_norm": 0.4375,
"learning_rate": 5.009275362318841e-05,
"loss": 0.199,
"step": 5360
},
{
"epoch": 0.77851473306513,
"grad_norm": 0.447265625,
"learning_rate": 5.003478260869566e-05,
"loss": 0.1856,
"step": 5370
},
{
"epoch": 0.7799644811713965,
"grad_norm": 0.421875,
"learning_rate": 4.99768115942029e-05,
"loss": 0.1916,
"step": 5380
},
{
"epoch": 0.781414229277663,
"grad_norm": 0.5234375,
"learning_rate": 4.9918840579710145e-05,
"loss": 0.2063,
"step": 5390
},
{
"epoch": 0.7828639773839295,
"grad_norm": 0.44921875,
"learning_rate": 4.98608695652174e-05,
"loss": 0.2199,
"step": 5400
},
{
"epoch": 0.7843137254901961,
"grad_norm": 0.54296875,
"learning_rate": 4.980289855072464e-05,
"loss": 0.2127,
"step": 5410
},
{
"epoch": 0.7857634735964626,
"grad_norm": 0.46484375,
"learning_rate": 4.9744927536231886e-05,
"loss": 0.1862,
"step": 5420
},
{
"epoch": 0.7872132217027291,
"grad_norm": 0.55078125,
"learning_rate": 4.968695652173914e-05,
"loss": 0.207,
"step": 5430
},
{
"epoch": 0.7886629698089956,
"grad_norm": 0.453125,
"learning_rate": 4.962898550724638e-05,
"loss": 0.2174,
"step": 5440
},
{
"epoch": 0.7901127179152623,
"grad_norm": 0.44921875,
"learning_rate": 4.9571014492753626e-05,
"loss": 0.2056,
"step": 5450
},
{
"epoch": 0.7915624660215288,
"grad_norm": 0.625,
"learning_rate": 4.9513043478260866e-05,
"loss": 0.196,
"step": 5460
},
{
"epoch": 0.7930122141277953,
"grad_norm": 0.45703125,
"learning_rate": 4.945507246376812e-05,
"loss": 0.2067,
"step": 5470
},
{
"epoch": 0.7944619622340618,
"grad_norm": 0.5078125,
"learning_rate": 4.939710144927537e-05,
"loss": 0.2047,
"step": 5480
},
{
"epoch": 0.7959117103403284,
"grad_norm": 0.66015625,
"learning_rate": 4.933913043478261e-05,
"loss": 0.1884,
"step": 5490
},
{
"epoch": 0.7973614584465949,
"grad_norm": 0.515625,
"learning_rate": 4.928115942028986e-05,
"loss": 0.2006,
"step": 5500
},
{
"epoch": 0.7988112065528614,
"grad_norm": 0.578125,
"learning_rate": 4.922318840579711e-05,
"loss": 0.2113,
"step": 5510
},
{
"epoch": 0.8002609546591279,
"grad_norm": 0.56640625,
"learning_rate": 4.916521739130435e-05,
"loss": 0.2136,
"step": 5520
},
{
"epoch": 0.8017107027653945,
"grad_norm": 0.51171875,
"learning_rate": 4.91072463768116e-05,
"loss": 0.2066,
"step": 5530
},
{
"epoch": 0.8031604508716611,
"grad_norm": 0.482421875,
"learning_rate": 4.904927536231885e-05,
"loss": 0.193,
"step": 5540
},
{
"epoch": 0.8046101989779276,
"grad_norm": 0.474609375,
"learning_rate": 4.899130434782609e-05,
"loss": 0.2067,
"step": 5550
},
{
"epoch": 0.8060599470841942,
"grad_norm": 0.416015625,
"learning_rate": 4.893333333333334e-05,
"loss": 0.2107,
"step": 5560
},
{
"epoch": 0.8075096951904607,
"grad_norm": 0.49609375,
"learning_rate": 4.887536231884058e-05,
"loss": 0.1996,
"step": 5570
},
{
"epoch": 0.8089594432967272,
"grad_norm": 0.59765625,
"learning_rate": 4.881739130434783e-05,
"loss": 0.2147,
"step": 5580
},
{
"epoch": 0.8104091914029937,
"grad_norm": 0.484375,
"learning_rate": 4.875942028985508e-05,
"loss": 0.2027,
"step": 5590
},
{
"epoch": 0.8118589395092602,
"grad_norm": 0.640625,
"learning_rate": 4.870144927536232e-05,
"loss": 0.2219,
"step": 5600
},
{
"epoch": 0.8133086876155268,
"grad_norm": 0.455078125,
"learning_rate": 4.864347826086957e-05,
"loss": 0.2009,
"step": 5610
},
{
"epoch": 0.8147584357217933,
"grad_norm": 0.498046875,
"learning_rate": 4.858550724637682e-05,
"loss": 0.2132,
"step": 5620
},
{
"epoch": 0.8162081838280599,
"grad_norm": 0.443359375,
"learning_rate": 4.852753623188406e-05,
"loss": 0.2011,
"step": 5630
},
{
"epoch": 0.8176579319343265,
"grad_norm": 0.52734375,
"learning_rate": 4.846956521739131e-05,
"loss": 0.2128,
"step": 5640
},
{
"epoch": 0.819107680040593,
"grad_norm": 0.494140625,
"learning_rate": 4.841159420289855e-05,
"loss": 0.2164,
"step": 5650
},
{
"epoch": 0.8205574281468595,
"grad_norm": 0.53125,
"learning_rate": 4.83536231884058e-05,
"loss": 0.2024,
"step": 5660
},
{
"epoch": 0.822007176253126,
"grad_norm": 0.5,
"learning_rate": 4.829565217391305e-05,
"loss": 0.1931,
"step": 5670
},
{
"epoch": 0.8234569243593926,
"grad_norm": 0.40234375,
"learning_rate": 4.823768115942029e-05,
"loss": 0.1904,
"step": 5680
},
{
"epoch": 0.8249066724656591,
"grad_norm": 0.486328125,
"learning_rate": 4.8179710144927543e-05,
"loss": 0.1982,
"step": 5690
},
{
"epoch": 0.8263564205719256,
"grad_norm": 0.462890625,
"learning_rate": 4.812173913043479e-05,
"loss": 0.207,
"step": 5700
},
{
"epoch": 0.8278061686781921,
"grad_norm": 0.53125,
"learning_rate": 4.806376811594203e-05,
"loss": 0.1984,
"step": 5710
},
{
"epoch": 0.8292559167844586,
"grad_norm": 0.45703125,
"learning_rate": 4.8005797101449284e-05,
"loss": 0.2153,
"step": 5720
},
{
"epoch": 0.8307056648907253,
"grad_norm": 0.55078125,
"learning_rate": 4.7947826086956524e-05,
"loss": 0.203,
"step": 5730
},
{
"epoch": 0.8321554129969918,
"grad_norm": 0.451171875,
"learning_rate": 4.788985507246377e-05,
"loss": 0.2196,
"step": 5740
},
{
"epoch": 0.8336051611032583,
"grad_norm": 0.515625,
"learning_rate": 4.783188405797102e-05,
"loss": 0.2112,
"step": 5750
},
{
"epoch": 0.8350549092095249,
"grad_norm": 0.490234375,
"learning_rate": 4.7773913043478265e-05,
"loss": 0.2122,
"step": 5760
},
{
"epoch": 0.8365046573157914,
"grad_norm": 0.546875,
"learning_rate": 4.771594202898551e-05,
"loss": 0.215,
"step": 5770
},
{
"epoch": 0.8379544054220579,
"grad_norm": 0.400390625,
"learning_rate": 4.765797101449276e-05,
"loss": 0.2039,
"step": 5780
},
{
"epoch": 0.8394041535283244,
"grad_norm": 0.4375,
"learning_rate": 4.7600000000000005e-05,
"loss": 0.1996,
"step": 5790
},
{
"epoch": 0.840853901634591,
"grad_norm": 0.46875,
"learning_rate": 4.754202898550725e-05,
"loss": 0.2037,
"step": 5800
},
{
"epoch": 0.8423036497408575,
"grad_norm": 0.51171875,
"learning_rate": 4.748405797101449e-05,
"loss": 0.2036,
"step": 5810
},
{
"epoch": 0.8437533978471241,
"grad_norm": 0.498046875,
"learning_rate": 4.742608695652174e-05,
"loss": 0.2033,
"step": 5820
},
{
"epoch": 0.8452031459533906,
"grad_norm": 0.486328125,
"learning_rate": 4.736811594202899e-05,
"loss": 0.2069,
"step": 5830
},
{
"epoch": 0.8466528940596572,
"grad_norm": 0.57421875,
"learning_rate": 4.731014492753623e-05,
"loss": 0.2093,
"step": 5840
},
{
"epoch": 0.8481026421659237,
"grad_norm": 0.408203125,
"learning_rate": 4.725217391304348e-05,
"loss": 0.2062,
"step": 5850
},
{
"epoch": 0.8495523902721902,
"grad_norm": 0.44921875,
"learning_rate": 4.719420289855073e-05,
"loss": 0.1895,
"step": 5860
},
{
"epoch": 0.8510021383784567,
"grad_norm": 0.474609375,
"learning_rate": 4.713623188405797e-05,
"loss": 0.2097,
"step": 5870
},
{
"epoch": 0.8524518864847233,
"grad_norm": 0.427734375,
"learning_rate": 4.707826086956522e-05,
"loss": 0.194,
"step": 5880
},
{
"epoch": 0.8539016345909898,
"grad_norm": 0.421875,
"learning_rate": 4.702028985507247e-05,
"loss": 0.2018,
"step": 5890
},
{
"epoch": 0.8553513826972563,
"grad_norm": 0.453125,
"learning_rate": 4.6962318840579713e-05,
"loss": 0.1964,
"step": 5900
},
{
"epoch": 0.8568011308035229,
"grad_norm": 0.53515625,
"learning_rate": 4.690434782608696e-05,
"loss": 0.2077,
"step": 5910
},
{
"epoch": 0.8582508789097895,
"grad_norm": 0.466796875,
"learning_rate": 4.68463768115942e-05,
"loss": 0.2151,
"step": 5920
},
{
"epoch": 0.859700627016056,
"grad_norm": 0.458984375,
"learning_rate": 4.6788405797101454e-05,
"loss": 0.1957,
"step": 5930
},
{
"epoch": 0.8611503751223225,
"grad_norm": 0.55078125,
"learning_rate": 4.67304347826087e-05,
"loss": 0.2102,
"step": 5940
},
{
"epoch": 0.862600123228589,
"grad_norm": 0.3828125,
"learning_rate": 4.667246376811594e-05,
"loss": 0.2003,
"step": 5950
},
{
"epoch": 0.8640498713348556,
"grad_norm": 0.37109375,
"learning_rate": 4.6614492753623194e-05,
"loss": 0.1804,
"step": 5960
},
{
"epoch": 0.8654996194411221,
"grad_norm": 0.45703125,
"learning_rate": 4.655652173913044e-05,
"loss": 0.1918,
"step": 5970
},
{
"epoch": 0.8669493675473886,
"grad_norm": 0.52734375,
"learning_rate": 4.649855072463768e-05,
"loss": 0.1922,
"step": 5980
},
{
"epoch": 0.8683991156536551,
"grad_norm": 0.482421875,
"learning_rate": 4.6440579710144935e-05,
"loss": 0.2057,
"step": 5990
},
{
"epoch": 0.8698488637599218,
"grad_norm": 0.44921875,
"learning_rate": 4.6382608695652175e-05,
"loss": 0.2022,
"step": 6000
},
{
"epoch": 0.8712986118661883,
"grad_norm": 0.419921875,
"learning_rate": 4.632463768115942e-05,
"loss": 0.2018,
"step": 6010
},
{
"epoch": 0.8727483599724548,
"grad_norm": 0.5078125,
"learning_rate": 4.6266666666666675e-05,
"loss": 0.1959,
"step": 6020
},
{
"epoch": 0.8741981080787213,
"grad_norm": 0.484375,
"learning_rate": 4.6208695652173916e-05,
"loss": 0.1936,
"step": 6030
},
{
"epoch": 0.8756478561849879,
"grad_norm": 0.486328125,
"learning_rate": 4.615072463768116e-05,
"loss": 0.2097,
"step": 6040
},
{
"epoch": 0.8770976042912544,
"grad_norm": 0.482421875,
"learning_rate": 4.6092753623188416e-05,
"loss": 0.2049,
"step": 6050
},
{
"epoch": 0.8785473523975209,
"grad_norm": 0.46875,
"learning_rate": 4.6034782608695656e-05,
"loss": 0.2022,
"step": 6060
},
{
"epoch": 0.8799971005037874,
"grad_norm": 0.5,
"learning_rate": 4.59768115942029e-05,
"loss": 0.2076,
"step": 6070
},
{
"epoch": 0.881446848610054,
"grad_norm": 0.466796875,
"learning_rate": 4.591884057971014e-05,
"loss": 0.2126,
"step": 6080
},
{
"epoch": 0.8828965967163206,
"grad_norm": 0.37890625,
"learning_rate": 4.5860869565217397e-05,
"loss": 0.2006,
"step": 6090
},
{
"epoch": 0.8843463448225871,
"grad_norm": 0.45703125,
"learning_rate": 4.580289855072464e-05,
"loss": 0.1969,
"step": 6100
},
{
"epoch": 0.8857960929288536,
"grad_norm": 0.45703125,
"learning_rate": 4.5744927536231883e-05,
"loss": 0.1912,
"step": 6110
},
{
"epoch": 0.8872458410351202,
"grad_norm": 0.478515625,
"learning_rate": 4.568695652173914e-05,
"loss": 0.2007,
"step": 6120
},
{
"epoch": 0.8886955891413867,
"grad_norm": 0.384765625,
"learning_rate": 4.5628985507246384e-05,
"loss": 0.209,
"step": 6130
},
{
"epoch": 0.8901453372476532,
"grad_norm": 0.470703125,
"learning_rate": 4.5571014492753624e-05,
"loss": 0.2011,
"step": 6140
},
{
"epoch": 0.8915950853539197,
"grad_norm": 0.59375,
"learning_rate": 4.551304347826088e-05,
"loss": 0.2006,
"step": 6150
},
{
"epoch": 0.8930448334601863,
"grad_norm": 0.46875,
"learning_rate": 4.5455072463768124e-05,
"loss": 0.2179,
"step": 6160
},
{
"epoch": 0.8944945815664528,
"grad_norm": 0.4921875,
"learning_rate": 4.5397101449275364e-05,
"loss": 0.195,
"step": 6170
},
{
"epoch": 0.8959443296727194,
"grad_norm": 0.5,
"learning_rate": 4.533913043478261e-05,
"loss": 0.2011,
"step": 6180
},
{
"epoch": 0.8973940777789859,
"grad_norm": 0.5625,
"learning_rate": 4.528115942028986e-05,
"loss": 0.2219,
"step": 6190
},
{
"epoch": 0.8988438258852525,
"grad_norm": 0.451171875,
"learning_rate": 4.5223188405797105e-05,
"loss": 0.1897,
"step": 6200
},
{
"epoch": 0.900293573991519,
"grad_norm": 0.423828125,
"learning_rate": 4.516521739130435e-05,
"loss": 0.1987,
"step": 6210
},
{
"epoch": 0.9017433220977855,
"grad_norm": 0.49609375,
"learning_rate": 4.51072463768116e-05,
"loss": 0.2126,
"step": 6220
},
{
"epoch": 0.903193070204052,
"grad_norm": 0.498046875,
"learning_rate": 4.5049275362318845e-05,
"loss": 0.2074,
"step": 6230
},
{
"epoch": 0.9046428183103186,
"grad_norm": 0.50390625,
"learning_rate": 4.499130434782609e-05,
"loss": 0.196,
"step": 6240
},
{
"epoch": 0.9060925664165851,
"grad_norm": 0.50390625,
"learning_rate": 4.493333333333333e-05,
"loss": 0.1886,
"step": 6250
},
{
"epoch": 0.9075423145228516,
"grad_norm": 0.396484375,
"learning_rate": 4.4875362318840586e-05,
"loss": 0.2042,
"step": 6260
},
{
"epoch": 0.9089920626291181,
"grad_norm": 0.46875,
"learning_rate": 4.4817391304347826e-05,
"loss": 0.1852,
"step": 6270
},
{
"epoch": 0.9104418107353848,
"grad_norm": 0.421875,
"learning_rate": 4.475942028985507e-05,
"loss": 0.2026,
"step": 6280
},
{
"epoch": 0.9118915588416513,
"grad_norm": 0.443359375,
"learning_rate": 4.4701449275362326e-05,
"loss": 0.1958,
"step": 6290
},
{
"epoch": 0.9133413069479178,
"grad_norm": 0.466796875,
"learning_rate": 4.4643478260869566e-05,
"loss": 0.1993,
"step": 6300
},
{
"epoch": 0.9147910550541843,
"grad_norm": 0.56640625,
"learning_rate": 4.458550724637681e-05,
"loss": 0.1972,
"step": 6310
},
{
"epoch": 0.9162408031604509,
"grad_norm": 0.3828125,
"learning_rate": 4.452753623188407e-05,
"loss": 0.2089,
"step": 6320
},
{
"epoch": 0.9176905512667174,
"grad_norm": 0.41796875,
"learning_rate": 4.446956521739131e-05,
"loss": 0.2033,
"step": 6330
},
{
"epoch": 0.9191402993729839,
"grad_norm": 0.5078125,
"learning_rate": 4.4411594202898554e-05,
"loss": 0.19,
"step": 6340
},
{
"epoch": 0.9205900474792504,
"grad_norm": 0.57421875,
"learning_rate": 4.4353623188405794e-05,
"loss": 0.1899,
"step": 6350
},
{
"epoch": 0.922039795585517,
"grad_norm": 0.5859375,
"learning_rate": 4.429565217391305e-05,
"loss": 0.1788,
"step": 6360
},
{
"epoch": 0.9234895436917836,
"grad_norm": 0.423828125,
"learning_rate": 4.4237681159420294e-05,
"loss": 0.191,
"step": 6370
},
{
"epoch": 0.9249392917980501,
"grad_norm": 0.4375,
"learning_rate": 4.4179710144927534e-05,
"loss": 0.1876,
"step": 6380
},
{
"epoch": 0.9263890399043166,
"grad_norm": 0.466796875,
"learning_rate": 4.412173913043479e-05,
"loss": 0.1953,
"step": 6390
},
{
"epoch": 0.9278387880105832,
"grad_norm": 0.5625,
"learning_rate": 4.4063768115942035e-05,
"loss": 0.2059,
"step": 6400
},
{
"epoch": 0.9292885361168497,
"grad_norm": 0.51953125,
"learning_rate": 4.4005797101449275e-05,
"loss": 0.2096,
"step": 6410
},
{
"epoch": 0.9307382842231162,
"grad_norm": 0.392578125,
"learning_rate": 4.394782608695653e-05,
"loss": 0.1669,
"step": 6420
},
{
"epoch": 0.9321880323293827,
"grad_norm": 0.486328125,
"learning_rate": 4.388985507246377e-05,
"loss": 0.213,
"step": 6430
},
{
"epoch": 0.9336377804356493,
"grad_norm": 0.466796875,
"learning_rate": 4.3831884057971015e-05,
"loss": 0.21,
"step": 6440
},
{
"epoch": 0.9350875285419158,
"grad_norm": 0.5390625,
"learning_rate": 4.377391304347827e-05,
"loss": 0.197,
"step": 6450
},
{
"epoch": 0.9365372766481824,
"grad_norm": 0.515625,
"learning_rate": 4.371594202898551e-05,
"loss": 0.2045,
"step": 6460
},
{
"epoch": 0.937987024754449,
"grad_norm": 0.408203125,
"learning_rate": 4.3657971014492756e-05,
"loss": 0.2076,
"step": 6470
},
{
"epoch": 0.9394367728607155,
"grad_norm": 0.373046875,
"learning_rate": 4.360000000000001e-05,
"loss": 0.216,
"step": 6480
},
{
"epoch": 0.940886520966982,
"grad_norm": 0.376953125,
"learning_rate": 4.354202898550725e-05,
"loss": 0.1929,
"step": 6490
},
{
"epoch": 0.9423362690732485,
"grad_norm": 0.470703125,
"learning_rate": 4.3484057971014496e-05,
"loss": 0.1862,
"step": 6500
},
{
"epoch": 0.943786017179515,
"grad_norm": 0.3828125,
"learning_rate": 4.342608695652175e-05,
"loss": 0.2018,
"step": 6510
},
{
"epoch": 0.9452357652857816,
"grad_norm": 0.50390625,
"learning_rate": 4.336811594202899e-05,
"loss": 0.1907,
"step": 6520
},
{
"epoch": 0.9466855133920481,
"grad_norm": 0.42578125,
"learning_rate": 4.331014492753624e-05,
"loss": 0.199,
"step": 6530
},
{
"epoch": 0.9481352614983146,
"grad_norm": 0.45703125,
"learning_rate": 4.325217391304348e-05,
"loss": 0.2044,
"step": 6540
},
{
"epoch": 0.9495850096045813,
"grad_norm": 0.39453125,
"learning_rate": 4.319420289855073e-05,
"loss": 0.2035,
"step": 6550
},
{
"epoch": 0.9510347577108478,
"grad_norm": 0.4609375,
"learning_rate": 4.313623188405798e-05,
"loss": 0.2034,
"step": 6560
},
{
"epoch": 0.9524845058171143,
"grad_norm": 0.482421875,
"learning_rate": 4.307826086956522e-05,
"loss": 0.1963,
"step": 6570
},
{
"epoch": 0.9539342539233808,
"grad_norm": 0.4921875,
"learning_rate": 4.302028985507247e-05,
"loss": 0.1931,
"step": 6580
},
{
"epoch": 0.9553840020296474,
"grad_norm": 0.455078125,
"learning_rate": 4.296231884057972e-05,
"loss": 0.1865,
"step": 6590
},
{
"epoch": 0.9568337501359139,
"grad_norm": 0.427734375,
"learning_rate": 4.290434782608696e-05,
"loss": 0.1951,
"step": 6600
},
{
"epoch": 0.9582834982421804,
"grad_norm": 0.462890625,
"learning_rate": 4.2846376811594205e-05,
"loss": 0.2007,
"step": 6610
},
{
"epoch": 0.9597332463484469,
"grad_norm": 0.478515625,
"learning_rate": 4.278840579710145e-05,
"loss": 0.1724,
"step": 6620
},
{
"epoch": 0.9611829944547134,
"grad_norm": 0.458984375,
"learning_rate": 4.27304347826087e-05,
"loss": 0.2057,
"step": 6630
},
{
"epoch": 0.9626327425609801,
"grad_norm": 0.388671875,
"learning_rate": 4.2672463768115945e-05,
"loss": 0.1819,
"step": 6640
},
{
"epoch": 0.9640824906672466,
"grad_norm": 0.443359375,
"learning_rate": 4.261449275362319e-05,
"loss": 0.2051,
"step": 6650
},
{
"epoch": 0.9655322387735131,
"grad_norm": 0.50390625,
"learning_rate": 4.255652173913044e-05,
"loss": 0.2099,
"step": 6660
},
{
"epoch": 0.9669819868797797,
"grad_norm": 0.453125,
"learning_rate": 4.2498550724637686e-05,
"loss": 0.2049,
"step": 6670
},
{
"epoch": 0.9684317349860462,
"grad_norm": 0.447265625,
"learning_rate": 4.2440579710144926e-05,
"loss": 0.1948,
"step": 6680
},
{
"epoch": 0.9698814830923127,
"grad_norm": 0.4140625,
"learning_rate": 4.238260869565218e-05,
"loss": 0.1893,
"step": 6690
},
{
"epoch": 0.9713312311985792,
"grad_norm": 0.44921875,
"learning_rate": 4.232463768115942e-05,
"loss": 0.1866,
"step": 6700
},
{
"epoch": 0.9727809793048457,
"grad_norm": 0.5703125,
"learning_rate": 4.2266666666666666e-05,
"loss": 0.2107,
"step": 6710
},
{
"epoch": 0.9742307274111123,
"grad_norm": 0.439453125,
"learning_rate": 4.220869565217392e-05,
"loss": 0.1822,
"step": 6720
},
{
"epoch": 0.9756804755173789,
"grad_norm": 0.455078125,
"learning_rate": 4.215072463768116e-05,
"loss": 0.2068,
"step": 6730
},
{
"epoch": 0.9771302236236454,
"grad_norm": 0.458984375,
"learning_rate": 4.209275362318841e-05,
"loss": 0.1854,
"step": 6740
},
{
"epoch": 0.978579971729912,
"grad_norm": 0.412109375,
"learning_rate": 4.203478260869566e-05,
"loss": 0.1964,
"step": 6750
},
{
"epoch": 0.9800297198361785,
"grad_norm": 0.55078125,
"learning_rate": 4.19768115942029e-05,
"loss": 0.1947,
"step": 6760
},
{
"epoch": 0.981479467942445,
"grad_norm": 0.486328125,
"learning_rate": 4.191884057971015e-05,
"loss": 0.191,
"step": 6770
},
{
"epoch": 0.9829292160487115,
"grad_norm": 0.462890625,
"learning_rate": 4.18608695652174e-05,
"loss": 0.1893,
"step": 6780
},
{
"epoch": 0.984378964154978,
"grad_norm": 0.462890625,
"learning_rate": 4.180289855072464e-05,
"loss": 0.1981,
"step": 6790
},
{
"epoch": 0.9858287122612446,
"grad_norm": 0.474609375,
"learning_rate": 4.174492753623189e-05,
"loss": 0.2046,
"step": 6800
},
{
"epoch": 0.9872784603675111,
"grad_norm": 0.5,
"learning_rate": 4.168695652173913e-05,
"loss": 0.2136,
"step": 6810
},
{
"epoch": 0.9887282084737777,
"grad_norm": 0.54296875,
"learning_rate": 4.162898550724638e-05,
"loss": 0.1955,
"step": 6820
},
{
"epoch": 0.9901779565800443,
"grad_norm": 0.46484375,
"learning_rate": 4.157101449275363e-05,
"loss": 0.191,
"step": 6830
},
{
"epoch": 0.9916277046863108,
"grad_norm": 0.51171875,
"learning_rate": 4.151304347826087e-05,
"loss": 0.1847,
"step": 6840
},
{
"epoch": 0.9930774527925773,
"grad_norm": 0.427734375,
"learning_rate": 4.145507246376812e-05,
"loss": 0.1887,
"step": 6850
},
{
"epoch": 0.9945272008988438,
"grad_norm": 0.53125,
"learning_rate": 4.139710144927537e-05,
"loss": 0.2071,
"step": 6860
},
{
"epoch": 0.9959769490051104,
"grad_norm": 0.490234375,
"learning_rate": 4.133913043478261e-05,
"loss": 0.2111,
"step": 6870
},
{
"epoch": 0.9974266971113769,
"grad_norm": 0.45703125,
"learning_rate": 4.128115942028986e-05,
"loss": 0.2007,
"step": 6880
},
{
"epoch": 0.9988764452176434,
"grad_norm": 0.490234375,
"learning_rate": 4.12231884057971e-05,
"loss": 0.1805,
"step": 6890
},
{
"epoch": 1.0002899496212534,
"grad_norm": 0.376953125,
"learning_rate": 4.116521739130435e-05,
"loss": 0.1933,
"step": 6900
},
{
"epoch": 1.00173969772752,
"grad_norm": 0.38671875,
"learning_rate": 4.11072463768116e-05,
"loss": 0.1644,
"step": 6910
},
{
"epoch": 1.0031894458337864,
"grad_norm": 0.484375,
"learning_rate": 4.104927536231884e-05,
"loss": 0.1714,
"step": 6920
},
{
"epoch": 1.004639193940053,
"grad_norm": 0.392578125,
"learning_rate": 4.099130434782609e-05,
"loss": 0.1744,
"step": 6930
},
{
"epoch": 1.0060889420463195,
"grad_norm": 0.3828125,
"learning_rate": 4.0933333333333343e-05,
"loss": 0.1468,
"step": 6940
},
{
"epoch": 1.007538690152586,
"grad_norm": 0.408203125,
"learning_rate": 4.0875362318840584e-05,
"loss": 0.1828,
"step": 6950
},
{
"epoch": 1.0089884382588525,
"grad_norm": 0.361328125,
"learning_rate": 4.081739130434783e-05,
"loss": 0.184,
"step": 6960
},
{
"epoch": 1.010438186365119,
"grad_norm": 0.458984375,
"learning_rate": 4.075942028985507e-05,
"loss": 0.1601,
"step": 6970
},
{
"epoch": 1.0118879344713856,
"grad_norm": 0.35546875,
"learning_rate": 4.0701449275362324e-05,
"loss": 0.1685,
"step": 6980
},
{
"epoch": 1.013337682577652,
"grad_norm": 0.392578125,
"learning_rate": 4.064347826086957e-05,
"loss": 0.1641,
"step": 6990
},
{
"epoch": 1.0147874306839186,
"grad_norm": 0.4453125,
"learning_rate": 4.058550724637681e-05,
"loss": 0.1809,
"step": 7000
},
{
"epoch": 1.0162371787901852,
"grad_norm": 0.4453125,
"learning_rate": 4.0527536231884065e-05,
"loss": 0.1669,
"step": 7010
},
{
"epoch": 1.0176869268964517,
"grad_norm": 0.423828125,
"learning_rate": 4.046956521739131e-05,
"loss": 0.1832,
"step": 7020
},
{
"epoch": 1.0191366750027182,
"grad_norm": 0.376953125,
"learning_rate": 4.041159420289855e-05,
"loss": 0.1815,
"step": 7030
},
{
"epoch": 1.0205864231089847,
"grad_norm": 0.380859375,
"learning_rate": 4.03536231884058e-05,
"loss": 0.1607,
"step": 7040
},
{
"epoch": 1.0220361712152513,
"grad_norm": 0.3515625,
"learning_rate": 4.0295652173913045e-05,
"loss": 0.1803,
"step": 7050
},
{
"epoch": 1.0234859193215178,
"grad_norm": 0.3828125,
"learning_rate": 4.023768115942029e-05,
"loss": 0.1759,
"step": 7060
},
{
"epoch": 1.0249356674277845,
"grad_norm": 0.4296875,
"learning_rate": 4.017971014492754e-05,
"loss": 0.152,
"step": 7070
},
{
"epoch": 1.026385415534051,
"grad_norm": 0.427734375,
"learning_rate": 4.0121739130434786e-05,
"loss": 0.1854,
"step": 7080
},
{
"epoch": 1.0278351636403176,
"grad_norm": 0.40625,
"learning_rate": 4.006376811594203e-05,
"loss": 0.16,
"step": 7090
},
{
"epoch": 1.029284911746584,
"grad_norm": 0.365234375,
"learning_rate": 4.000579710144928e-05,
"loss": 0.1726,
"step": 7100
},
{
"epoch": 1.0307346598528506,
"grad_norm": 0.451171875,
"learning_rate": 3.9947826086956526e-05,
"loss": 0.1467,
"step": 7110
},
{
"epoch": 1.0321844079591171,
"grad_norm": 0.43359375,
"learning_rate": 3.988985507246377e-05,
"loss": 0.1691,
"step": 7120
},
{
"epoch": 1.0336341560653837,
"grad_norm": 0.404296875,
"learning_rate": 3.983188405797102e-05,
"loss": 0.1769,
"step": 7130
},
{
"epoch": 1.0350839041716502,
"grad_norm": 0.3671875,
"learning_rate": 3.977391304347826e-05,
"loss": 0.1882,
"step": 7140
},
{
"epoch": 1.0365336522779167,
"grad_norm": 0.400390625,
"learning_rate": 3.9715942028985513e-05,
"loss": 0.1486,
"step": 7150
},
{
"epoch": 1.0379834003841832,
"grad_norm": 0.416015625,
"learning_rate": 3.965797101449276e-05,
"loss": 0.1615,
"step": 7160
},
{
"epoch": 1.0394331484904498,
"grad_norm": 0.41796875,
"learning_rate": 3.96e-05,
"loss": 0.1775,
"step": 7170
},
{
"epoch": 1.0408828965967163,
"grad_norm": 0.451171875,
"learning_rate": 3.954202898550725e-05,
"loss": 0.1646,
"step": 7180
},
{
"epoch": 1.0423326447029828,
"grad_norm": 0.380859375,
"learning_rate": 3.94840579710145e-05,
"loss": 0.1803,
"step": 7190
},
{
"epoch": 1.0437823928092493,
"grad_norm": 0.37109375,
"learning_rate": 3.942608695652174e-05,
"loss": 0.1701,
"step": 7200
},
{
"epoch": 1.0452321409155159,
"grad_norm": 0.46484375,
"learning_rate": 3.936811594202899e-05,
"loss": 0.1825,
"step": 7210
},
{
"epoch": 1.0466818890217824,
"grad_norm": 0.443359375,
"learning_rate": 3.9310144927536234e-05,
"loss": 0.1608,
"step": 7220
},
{
"epoch": 1.048131637128049,
"grad_norm": 0.375,
"learning_rate": 3.925217391304348e-05,
"loss": 0.1573,
"step": 7230
},
{
"epoch": 1.0495813852343154,
"grad_norm": 0.365234375,
"learning_rate": 3.919420289855073e-05,
"loss": 0.1663,
"step": 7240
},
{
"epoch": 1.0510311333405822,
"grad_norm": 0.451171875,
"learning_rate": 3.9136231884057975e-05,
"loss": 0.1699,
"step": 7250
},
{
"epoch": 1.0524808814468487,
"grad_norm": 0.451171875,
"learning_rate": 3.907826086956522e-05,
"loss": 0.1592,
"step": 7260
},
{
"epoch": 1.0539306295531152,
"grad_norm": 0.455078125,
"learning_rate": 3.902028985507247e-05,
"loss": 0.164,
"step": 7270
},
{
"epoch": 1.0553803776593818,
"grad_norm": 0.412109375,
"learning_rate": 3.8962318840579715e-05,
"loss": 0.1752,
"step": 7280
},
{
"epoch": 1.0568301257656483,
"grad_norm": 0.400390625,
"learning_rate": 3.8904347826086956e-05,
"loss": 0.1536,
"step": 7290
},
{
"epoch": 1.0582798738719148,
"grad_norm": 0.357421875,
"learning_rate": 3.88463768115942e-05,
"loss": 0.1703,
"step": 7300
},
{
"epoch": 1.0597296219781813,
"grad_norm": 0.431640625,
"learning_rate": 3.8788405797101456e-05,
"loss": 0.1652,
"step": 7310
},
{
"epoch": 1.0611793700844478,
"grad_norm": 0.392578125,
"learning_rate": 3.8730434782608696e-05,
"loss": 0.1657,
"step": 7320
},
{
"epoch": 1.0626291181907144,
"grad_norm": 0.39453125,
"learning_rate": 3.867246376811594e-05,
"loss": 0.1645,
"step": 7330
},
{
"epoch": 1.064078866296981,
"grad_norm": 0.375,
"learning_rate": 3.861449275362319e-05,
"loss": 0.166,
"step": 7340
},
{
"epoch": 1.0655286144032474,
"grad_norm": 0.412109375,
"learning_rate": 3.8556521739130437e-05,
"loss": 0.1553,
"step": 7350
},
{
"epoch": 1.066978362509514,
"grad_norm": 0.419921875,
"learning_rate": 3.8498550724637683e-05,
"loss": 0.1608,
"step": 7360
},
{
"epoch": 1.0684281106157805,
"grad_norm": 0.43359375,
"learning_rate": 3.844057971014493e-05,
"loss": 0.174,
"step": 7370
},
{
"epoch": 1.069877858722047,
"grad_norm": 0.419921875,
"learning_rate": 3.838260869565218e-05,
"loss": 0.1674,
"step": 7380
},
{
"epoch": 1.0713276068283135,
"grad_norm": 0.447265625,
"learning_rate": 3.8324637681159424e-05,
"loss": 0.1702,
"step": 7390
},
{
"epoch": 1.07277735493458,
"grad_norm": 0.435546875,
"learning_rate": 3.826666666666667e-05,
"loss": 0.1698,
"step": 7400
},
{
"epoch": 1.0742271030408466,
"grad_norm": 0.48046875,
"learning_rate": 3.820869565217392e-05,
"loss": 0.1638,
"step": 7410
},
{
"epoch": 1.075676851147113,
"grad_norm": 0.42578125,
"learning_rate": 3.8150724637681164e-05,
"loss": 0.1835,
"step": 7420
},
{
"epoch": 1.0771265992533796,
"grad_norm": 0.359375,
"learning_rate": 3.809275362318841e-05,
"loss": 0.1662,
"step": 7430
},
{
"epoch": 1.0785763473596464,
"grad_norm": 0.4453125,
"learning_rate": 3.803478260869566e-05,
"loss": 0.1608,
"step": 7440
},
{
"epoch": 1.0800260954659129,
"grad_norm": 0.416015625,
"learning_rate": 3.79768115942029e-05,
"loss": 0.1768,
"step": 7450
},
{
"epoch": 1.0814758435721794,
"grad_norm": 0.427734375,
"learning_rate": 3.791884057971015e-05,
"loss": 0.1661,
"step": 7460
},
{
"epoch": 1.082925591678446,
"grad_norm": 0.515625,
"learning_rate": 3.78608695652174e-05,
"loss": 0.1672,
"step": 7470
},
{
"epoch": 1.0843753397847125,
"grad_norm": 0.376953125,
"learning_rate": 3.780289855072464e-05,
"loss": 0.167,
"step": 7480
},
{
"epoch": 1.085825087890979,
"grad_norm": 0.421875,
"learning_rate": 3.7744927536231885e-05,
"loss": 0.1695,
"step": 7490
},
{
"epoch": 1.0872748359972455,
"grad_norm": 0.482421875,
"learning_rate": 3.768695652173913e-05,
"loss": 0.1663,
"step": 7500
},
{
"epoch": 1.088724584103512,
"grad_norm": 0.4765625,
"learning_rate": 3.762898550724638e-05,
"loss": 0.168,
"step": 7510
},
{
"epoch": 1.0901743322097786,
"grad_norm": 0.443359375,
"learning_rate": 3.7571014492753626e-05,
"loss": 0.175,
"step": 7520
},
{
"epoch": 1.091624080316045,
"grad_norm": 0.4375,
"learning_rate": 3.751304347826087e-05,
"loss": 0.1669,
"step": 7530
},
{
"epoch": 1.0930738284223116,
"grad_norm": 0.53515625,
"learning_rate": 3.745507246376812e-05,
"loss": 0.1618,
"step": 7540
},
{
"epoch": 1.0945235765285781,
"grad_norm": 0.36328125,
"learning_rate": 3.7397101449275366e-05,
"loss": 0.1663,
"step": 7550
},
{
"epoch": 1.0959733246348446,
"grad_norm": 0.3828125,
"learning_rate": 3.733913043478261e-05,
"loss": 0.1564,
"step": 7560
},
{
"epoch": 1.0974230727411112,
"grad_norm": 0.470703125,
"learning_rate": 3.728115942028985e-05,
"loss": 0.1635,
"step": 7570
},
{
"epoch": 1.0988728208473777,
"grad_norm": 0.41796875,
"learning_rate": 3.722318840579711e-05,
"loss": 0.1587,
"step": 7580
},
{
"epoch": 1.1003225689536442,
"grad_norm": 0.44140625,
"learning_rate": 3.7165217391304354e-05,
"loss": 0.1623,
"step": 7590
},
{
"epoch": 1.1017723170599107,
"grad_norm": 0.3984375,
"learning_rate": 3.7107246376811594e-05,
"loss": 0.1672,
"step": 7600
},
{
"epoch": 1.1032220651661775,
"grad_norm": 0.427734375,
"learning_rate": 3.704927536231884e-05,
"loss": 0.1784,
"step": 7610
},
{
"epoch": 1.104671813272444,
"grad_norm": 0.3828125,
"learning_rate": 3.6991304347826094e-05,
"loss": 0.1744,
"step": 7620
},
{
"epoch": 1.1061215613787105,
"grad_norm": 0.408203125,
"learning_rate": 3.6933333333333334e-05,
"loss": 0.1582,
"step": 7630
},
{
"epoch": 1.107571309484977,
"grad_norm": 0.470703125,
"learning_rate": 3.687536231884058e-05,
"loss": 0.164,
"step": 7640
},
{
"epoch": 1.1090210575912436,
"grad_norm": 0.376953125,
"learning_rate": 3.681739130434783e-05,
"loss": 0.1606,
"step": 7650
},
{
"epoch": 1.11047080569751,
"grad_norm": 0.412109375,
"learning_rate": 3.6759420289855075e-05,
"loss": 0.1787,
"step": 7660
},
{
"epoch": 1.1119205538037766,
"grad_norm": 0.40234375,
"learning_rate": 3.670144927536232e-05,
"loss": 0.1692,
"step": 7670
},
{
"epoch": 1.1133703019100432,
"grad_norm": 0.44921875,
"learning_rate": 3.664347826086957e-05,
"loss": 0.1527,
"step": 7680
},
{
"epoch": 1.1148200500163097,
"grad_norm": 0.443359375,
"learning_rate": 3.6585507246376815e-05,
"loss": 0.1791,
"step": 7690
},
{
"epoch": 1.1162697981225762,
"grad_norm": 0.341796875,
"learning_rate": 3.652753623188406e-05,
"loss": 0.1627,
"step": 7700
},
{
"epoch": 1.1177195462288427,
"grad_norm": 0.435546875,
"learning_rate": 3.646956521739131e-05,
"loss": 0.1688,
"step": 7710
},
{
"epoch": 1.1191692943351093,
"grad_norm": 0.455078125,
"learning_rate": 3.6411594202898556e-05,
"loss": 0.1643,
"step": 7720
},
{
"epoch": 1.1206190424413758,
"grad_norm": 0.45703125,
"learning_rate": 3.63536231884058e-05,
"loss": 0.1625,
"step": 7730
},
{
"epoch": 1.1220687905476423,
"grad_norm": 0.5078125,
"learning_rate": 3.629565217391305e-05,
"loss": 0.1725,
"step": 7740
},
{
"epoch": 1.1235185386539088,
"grad_norm": 0.373046875,
"learning_rate": 3.623768115942029e-05,
"loss": 0.1697,
"step": 7750
},
{
"epoch": 1.1249682867601754,
"grad_norm": 0.546875,
"learning_rate": 3.6179710144927536e-05,
"loss": 0.1659,
"step": 7760
},
{
"epoch": 1.1264180348664419,
"grad_norm": 0.41796875,
"learning_rate": 3.612173913043479e-05,
"loss": 0.1626,
"step": 7770
},
{
"epoch": 1.1278677829727084,
"grad_norm": 0.400390625,
"learning_rate": 3.606376811594203e-05,
"loss": 0.1687,
"step": 7780
},
{
"epoch": 1.129317531078975,
"grad_norm": 0.40234375,
"learning_rate": 3.600579710144928e-05,
"loss": 0.1644,
"step": 7790
},
{
"epoch": 1.1307672791852417,
"grad_norm": 0.486328125,
"learning_rate": 3.5947826086956524e-05,
"loss": 0.169,
"step": 7800
},
{
"epoch": 1.1322170272915082,
"grad_norm": 0.5390625,
"learning_rate": 3.588985507246377e-05,
"loss": 0.1697,
"step": 7810
},
{
"epoch": 1.1336667753977747,
"grad_norm": 0.427734375,
"learning_rate": 3.583188405797102e-05,
"loss": 0.1661,
"step": 7820
},
{
"epoch": 1.1351165235040412,
"grad_norm": 0.357421875,
"learning_rate": 3.5773913043478264e-05,
"loss": 0.1838,
"step": 7830
},
{
"epoch": 1.1365662716103078,
"grad_norm": 0.458984375,
"learning_rate": 3.571594202898551e-05,
"loss": 0.1771,
"step": 7840
},
{
"epoch": 1.1380160197165743,
"grad_norm": 0.421875,
"learning_rate": 3.565797101449276e-05,
"loss": 0.1645,
"step": 7850
},
{
"epoch": 1.1394657678228408,
"grad_norm": 0.3984375,
"learning_rate": 3.5600000000000005e-05,
"loss": 0.1583,
"step": 7860
},
{
"epoch": 1.1409155159291073,
"grad_norm": 0.408203125,
"learning_rate": 3.554202898550725e-05,
"loss": 0.1662,
"step": 7870
},
{
"epoch": 1.1423652640353739,
"grad_norm": 0.44921875,
"learning_rate": 3.548405797101449e-05,
"loss": 0.1713,
"step": 7880
},
{
"epoch": 1.1438150121416404,
"grad_norm": 0.5546875,
"learning_rate": 3.5426086956521745e-05,
"loss": 0.1745,
"step": 7890
},
{
"epoch": 1.145264760247907,
"grad_norm": 0.4140625,
"learning_rate": 3.536811594202899e-05,
"loss": 0.1584,
"step": 7900
},
{
"epoch": 1.1467145083541734,
"grad_norm": 0.4453125,
"learning_rate": 3.531014492753623e-05,
"loss": 0.1499,
"step": 7910
},
{
"epoch": 1.14816425646044,
"grad_norm": 0.447265625,
"learning_rate": 3.525217391304348e-05,
"loss": 0.1696,
"step": 7920
},
{
"epoch": 1.1496140045667065,
"grad_norm": 0.4296875,
"learning_rate": 3.5194202898550726e-05,
"loss": 0.1581,
"step": 7930
},
{
"epoch": 1.151063752672973,
"grad_norm": 0.40625,
"learning_rate": 3.513623188405797e-05,
"loss": 0.1657,
"step": 7940
},
{
"epoch": 1.1525135007792395,
"grad_norm": 0.392578125,
"learning_rate": 3.507826086956522e-05,
"loss": 0.141,
"step": 7950
},
{
"epoch": 1.153963248885506,
"grad_norm": 0.35546875,
"learning_rate": 3.5020289855072466e-05,
"loss": 0.1644,
"step": 7960
},
{
"epoch": 1.1554129969917728,
"grad_norm": 0.41015625,
"learning_rate": 3.496231884057971e-05,
"loss": 0.1662,
"step": 7970
},
{
"epoch": 1.156862745098039,
"grad_norm": 0.37109375,
"learning_rate": 3.490434782608696e-05,
"loss": 0.1687,
"step": 7980
},
{
"epoch": 1.1583124932043058,
"grad_norm": 0.390625,
"learning_rate": 3.484637681159421e-05,
"loss": 0.16,
"step": 7990
},
{
"epoch": 1.1597622413105724,
"grad_norm": 0.44921875,
"learning_rate": 3.478840579710145e-05,
"loss": 0.1688,
"step": 8000
},
{
"epoch": 1.161211989416839,
"grad_norm": 0.439453125,
"learning_rate": 3.47304347826087e-05,
"loss": 0.1769,
"step": 8010
},
{
"epoch": 1.1626617375231054,
"grad_norm": 0.400390625,
"learning_rate": 3.467246376811595e-05,
"loss": 0.167,
"step": 8020
},
{
"epoch": 1.164111485629372,
"grad_norm": 0.51171875,
"learning_rate": 3.461449275362319e-05,
"loss": 0.162,
"step": 8030
},
{
"epoch": 1.1655612337356385,
"grad_norm": 0.482421875,
"learning_rate": 3.455652173913044e-05,
"loss": 0.1698,
"step": 8040
},
{
"epoch": 1.167010981841905,
"grad_norm": 0.400390625,
"learning_rate": 3.449855072463769e-05,
"loss": 0.1723,
"step": 8050
},
{
"epoch": 1.1684607299481715,
"grad_norm": 0.42578125,
"learning_rate": 3.444057971014493e-05,
"loss": 0.1707,
"step": 8060
},
{
"epoch": 1.169910478054438,
"grad_norm": 0.384765625,
"learning_rate": 3.4382608695652175e-05,
"loss": 0.1614,
"step": 8070
},
{
"epoch": 1.1713602261607046,
"grad_norm": 0.3984375,
"learning_rate": 3.432463768115943e-05,
"loss": 0.1579,
"step": 8080
},
{
"epoch": 1.172809974266971,
"grad_norm": 0.435546875,
"learning_rate": 3.426666666666667e-05,
"loss": 0.1902,
"step": 8090
},
{
"epoch": 1.1742597223732376,
"grad_norm": 0.388671875,
"learning_rate": 3.4208695652173915e-05,
"loss": 0.1664,
"step": 8100
},
{
"epoch": 1.1757094704795041,
"grad_norm": 0.462890625,
"learning_rate": 3.415072463768116e-05,
"loss": 0.1695,
"step": 8110
},
{
"epoch": 1.1771592185857707,
"grad_norm": 0.453125,
"learning_rate": 3.409275362318841e-05,
"loss": 0.1818,
"step": 8120
},
{
"epoch": 1.1786089666920372,
"grad_norm": 0.458984375,
"learning_rate": 3.4034782608695656e-05,
"loss": 0.1813,
"step": 8130
},
{
"epoch": 1.1800587147983037,
"grad_norm": 0.4140625,
"learning_rate": 3.39768115942029e-05,
"loss": 0.1664,
"step": 8140
},
{
"epoch": 1.1815084629045702,
"grad_norm": 0.482421875,
"learning_rate": 3.391884057971015e-05,
"loss": 0.1787,
"step": 8150
},
{
"epoch": 1.182958211010837,
"grad_norm": 0.44921875,
"learning_rate": 3.3860869565217396e-05,
"loss": 0.1701,
"step": 8160
},
{
"epoch": 1.1844079591171033,
"grad_norm": 0.4375,
"learning_rate": 3.380289855072464e-05,
"loss": 0.1721,
"step": 8170
},
{
"epoch": 1.18585770722337,
"grad_norm": 0.48046875,
"learning_rate": 3.374492753623188e-05,
"loss": 0.1579,
"step": 8180
},
{
"epoch": 1.1873074553296366,
"grad_norm": 0.47265625,
"learning_rate": 3.368695652173913e-05,
"loss": 0.1869,
"step": 8190
},
{
"epoch": 1.188757203435903,
"grad_norm": 0.470703125,
"learning_rate": 3.3628985507246384e-05,
"loss": 0.1536,
"step": 8200
},
{
"epoch": 1.1902069515421696,
"grad_norm": 0.423828125,
"learning_rate": 3.3571014492753624e-05,
"loss": 0.1651,
"step": 8210
},
{
"epoch": 1.1916566996484361,
"grad_norm": 0.43359375,
"learning_rate": 3.351304347826087e-05,
"loss": 0.1725,
"step": 8220
},
{
"epoch": 1.1931064477547026,
"grad_norm": 0.37890625,
"learning_rate": 3.345507246376812e-05,
"loss": 0.1576,
"step": 8230
},
{
"epoch": 1.1945561958609692,
"grad_norm": 0.388671875,
"learning_rate": 3.3397101449275364e-05,
"loss": 0.1548,
"step": 8240
},
{
"epoch": 1.1960059439672357,
"grad_norm": 0.46875,
"learning_rate": 3.333913043478261e-05,
"loss": 0.1718,
"step": 8250
},
{
"epoch": 1.1974556920735022,
"grad_norm": 0.376953125,
"learning_rate": 3.328115942028986e-05,
"loss": 0.1576,
"step": 8260
},
{
"epoch": 1.1989054401797687,
"grad_norm": 0.38671875,
"learning_rate": 3.3223188405797105e-05,
"loss": 0.1737,
"step": 8270
},
{
"epoch": 1.2003551882860353,
"grad_norm": 0.5,
"learning_rate": 3.316521739130435e-05,
"loss": 0.1614,
"step": 8280
},
{
"epoch": 1.2018049363923018,
"grad_norm": 0.4296875,
"learning_rate": 3.31072463768116e-05,
"loss": 0.1717,
"step": 8290
},
{
"epoch": 1.2032546844985683,
"grad_norm": 0.3125,
"learning_rate": 3.3049275362318845e-05,
"loss": 0.1655,
"step": 8300
},
{
"epoch": 1.2047044326048348,
"grad_norm": 0.4140625,
"learning_rate": 3.2991304347826085e-05,
"loss": 0.17,
"step": 8310
},
{
"epoch": 1.2061541807111014,
"grad_norm": 0.42578125,
"learning_rate": 3.293333333333334e-05,
"loss": 0.1668,
"step": 8320
},
{
"epoch": 1.2076039288173679,
"grad_norm": 0.44921875,
"learning_rate": 3.2875362318840586e-05,
"loss": 0.1616,
"step": 8330
},
{
"epoch": 1.2090536769236344,
"grad_norm": 0.435546875,
"learning_rate": 3.2817391304347826e-05,
"loss": 0.164,
"step": 8340
},
{
"epoch": 1.2105034250299012,
"grad_norm": 0.353515625,
"learning_rate": 3.275942028985508e-05,
"loss": 0.1725,
"step": 8350
},
{
"epoch": 1.2119531731361677,
"grad_norm": 0.44140625,
"learning_rate": 3.270144927536232e-05,
"loss": 0.1851,
"step": 8360
},
{
"epoch": 1.2134029212424342,
"grad_norm": 0.462890625,
"learning_rate": 3.2643478260869566e-05,
"loss": 0.1599,
"step": 8370
},
{
"epoch": 1.2148526693487007,
"grad_norm": 0.369140625,
"learning_rate": 3.258550724637681e-05,
"loss": 0.1603,
"step": 8380
},
{
"epoch": 1.2163024174549673,
"grad_norm": 0.42578125,
"learning_rate": 3.252753623188406e-05,
"loss": 0.1624,
"step": 8390
},
{
"epoch": 1.2177521655612338,
"grad_norm": 0.384765625,
"learning_rate": 3.246956521739131e-05,
"loss": 0.1704,
"step": 8400
},
{
"epoch": 1.2192019136675003,
"grad_norm": 0.376953125,
"learning_rate": 3.2411594202898553e-05,
"loss": 0.1726,
"step": 8410
},
{
"epoch": 1.2206516617737668,
"grad_norm": 0.3671875,
"learning_rate": 3.23536231884058e-05,
"loss": 0.1612,
"step": 8420
},
{
"epoch": 1.2221014098800334,
"grad_norm": 0.318359375,
"learning_rate": 3.229565217391305e-05,
"loss": 0.1696,
"step": 8430
},
{
"epoch": 1.2235511579862999,
"grad_norm": 0.42578125,
"learning_rate": 3.2237681159420294e-05,
"loss": 0.1572,
"step": 8440
},
{
"epoch": 1.2250009060925664,
"grad_norm": 0.451171875,
"learning_rate": 3.217971014492754e-05,
"loss": 0.1694,
"step": 8450
},
{
"epoch": 1.226450654198833,
"grad_norm": 0.5,
"learning_rate": 3.212173913043478e-05,
"loss": 0.1724,
"step": 8460
},
{
"epoch": 1.2279004023050994,
"grad_norm": 0.412109375,
"learning_rate": 3.2063768115942034e-05,
"loss": 0.1724,
"step": 8470
},
{
"epoch": 1.229350150411366,
"grad_norm": 0.451171875,
"learning_rate": 3.200579710144928e-05,
"loss": 0.1751,
"step": 8480
},
{
"epoch": 1.2307998985176325,
"grad_norm": 0.5078125,
"learning_rate": 3.194782608695652e-05,
"loss": 0.1702,
"step": 8490
},
{
"epoch": 1.232249646623899,
"grad_norm": 0.345703125,
"learning_rate": 3.188985507246377e-05,
"loss": 0.1537,
"step": 8500
},
{
"epoch": 1.2336993947301655,
"grad_norm": 0.4765625,
"learning_rate": 3.183188405797102e-05,
"loss": 0.169,
"step": 8510
},
{
"epoch": 1.2351491428364323,
"grad_norm": 0.38671875,
"learning_rate": 3.177391304347826e-05,
"loss": 0.1591,
"step": 8520
},
{
"epoch": 1.2365988909426986,
"grad_norm": 0.40234375,
"learning_rate": 3.171594202898551e-05,
"loss": 0.1641,
"step": 8530
},
{
"epoch": 1.2380486390489653,
"grad_norm": 0.322265625,
"learning_rate": 3.1657971014492756e-05,
"loss": 0.1795,
"step": 8540
},
{
"epoch": 1.2394983871552319,
"grad_norm": 0.40625,
"learning_rate": 3.16e-05,
"loss": 0.152,
"step": 8550
},
{
"epoch": 1.2409481352614984,
"grad_norm": 0.36328125,
"learning_rate": 3.154202898550725e-05,
"loss": 0.1784,
"step": 8560
},
{
"epoch": 1.242397883367765,
"grad_norm": 0.45703125,
"learning_rate": 3.1484057971014496e-05,
"loss": 0.1703,
"step": 8570
},
{
"epoch": 1.2438476314740314,
"grad_norm": 0.390625,
"learning_rate": 3.142608695652174e-05,
"loss": 0.1648,
"step": 8580
},
{
"epoch": 1.245297379580298,
"grad_norm": 0.40625,
"learning_rate": 3.136811594202899e-05,
"loss": 0.1617,
"step": 8590
},
{
"epoch": 1.2467471276865645,
"grad_norm": 0.419921875,
"learning_rate": 3.1310144927536237e-05,
"loss": 0.1634,
"step": 8600
},
{
"epoch": 1.248196875792831,
"grad_norm": 0.375,
"learning_rate": 3.1252173913043477e-05,
"loss": 0.1504,
"step": 8610
},
{
"epoch": 1.2496466238990975,
"grad_norm": 0.4453125,
"learning_rate": 3.1194202898550723e-05,
"loss": 0.1588,
"step": 8620
},
{
"epoch": 1.251096372005364,
"grad_norm": 0.35546875,
"learning_rate": 3.113623188405798e-05,
"loss": 0.1659,
"step": 8630
},
{
"epoch": 1.2525461201116306,
"grad_norm": 0.439453125,
"learning_rate": 3.107826086956522e-05,
"loss": 0.1685,
"step": 8640
},
{
"epoch": 1.253995868217897,
"grad_norm": 0.5,
"learning_rate": 3.1020289855072464e-05,
"loss": 0.1783,
"step": 8650
},
{
"epoch": 1.2554456163241636,
"grad_norm": 0.41796875,
"learning_rate": 3.096231884057972e-05,
"loss": 0.1549,
"step": 8660
},
{
"epoch": 1.2568953644304302,
"grad_norm": 0.38671875,
"learning_rate": 3.090434782608696e-05,
"loss": 0.1605,
"step": 8670
},
{
"epoch": 1.2583451125366967,
"grad_norm": 0.466796875,
"learning_rate": 3.0846376811594204e-05,
"loss": 0.178,
"step": 8680
},
{
"epoch": 1.2597948606429634,
"grad_norm": 0.458984375,
"learning_rate": 3.078840579710145e-05,
"loss": 0.1672,
"step": 8690
},
{
"epoch": 1.2612446087492297,
"grad_norm": 0.40234375,
"learning_rate": 3.07304347826087e-05,
"loss": 0.1632,
"step": 8700
},
{
"epoch": 1.2626943568554965,
"grad_norm": 0.408203125,
"learning_rate": 3.0672463768115945e-05,
"loss": 0.1577,
"step": 8710
},
{
"epoch": 1.2641441049617628,
"grad_norm": 0.46484375,
"learning_rate": 3.061449275362319e-05,
"loss": 0.1617,
"step": 8720
},
{
"epoch": 1.2655938530680295,
"grad_norm": 0.423828125,
"learning_rate": 3.055652173913044e-05,
"loss": 0.163,
"step": 8730
},
{
"epoch": 1.267043601174296,
"grad_norm": 0.458984375,
"learning_rate": 3.0498550724637685e-05,
"loss": 0.1678,
"step": 8740
},
{
"epoch": 1.2684933492805626,
"grad_norm": 0.455078125,
"learning_rate": 3.0440579710144932e-05,
"loss": 0.1672,
"step": 8750
},
{
"epoch": 1.269943097386829,
"grad_norm": 0.416015625,
"learning_rate": 3.0382608695652176e-05,
"loss": 0.1707,
"step": 8760
},
{
"epoch": 1.2713928454930956,
"grad_norm": 0.41015625,
"learning_rate": 3.0324637681159423e-05,
"loss": 0.1587,
"step": 8770
},
{
"epoch": 1.2728425935993621,
"grad_norm": 0.388671875,
"learning_rate": 3.026666666666667e-05,
"loss": 0.1574,
"step": 8780
},
{
"epoch": 1.2742923417056287,
"grad_norm": 0.46875,
"learning_rate": 3.0208695652173916e-05,
"loss": 0.1552,
"step": 8790
},
{
"epoch": 1.2757420898118952,
"grad_norm": 0.384765625,
"learning_rate": 3.015072463768116e-05,
"loss": 0.1711,
"step": 8800
},
{
"epoch": 1.2771918379181617,
"grad_norm": 0.3828125,
"learning_rate": 3.0092753623188407e-05,
"loss": 0.1718,
"step": 8810
},
{
"epoch": 1.2786415860244282,
"grad_norm": 0.423828125,
"learning_rate": 3.0034782608695657e-05,
"loss": 0.1723,
"step": 8820
},
{
"epoch": 1.2800913341306948,
"grad_norm": 0.490234375,
"learning_rate": 2.99768115942029e-05,
"loss": 0.1638,
"step": 8830
},
{
"epoch": 1.2815410822369613,
"grad_norm": 0.38671875,
"learning_rate": 2.9918840579710147e-05,
"loss": 0.1674,
"step": 8840
},
{
"epoch": 1.2829908303432278,
"grad_norm": 0.431640625,
"learning_rate": 2.986086956521739e-05,
"loss": 0.1581,
"step": 8850
},
{
"epoch": 1.2844405784494943,
"grad_norm": 0.37890625,
"learning_rate": 2.980289855072464e-05,
"loss": 0.1585,
"step": 8860
},
{
"epoch": 1.2858903265557609,
"grad_norm": 0.4375,
"learning_rate": 2.9744927536231888e-05,
"loss": 0.1684,
"step": 8870
},
{
"epoch": 1.2873400746620276,
"grad_norm": 0.369140625,
"learning_rate": 2.968695652173913e-05,
"loss": 0.1677,
"step": 8880
},
{
"epoch": 1.288789822768294,
"grad_norm": 0.38671875,
"learning_rate": 2.9628985507246378e-05,
"loss": 0.162,
"step": 8890
},
{
"epoch": 1.2902395708745606,
"grad_norm": 0.41796875,
"learning_rate": 2.9571014492753628e-05,
"loss": 0.1612,
"step": 8900
},
{
"epoch": 1.291689318980827,
"grad_norm": 0.443359375,
"learning_rate": 2.951304347826087e-05,
"loss": 0.1638,
"step": 8910
},
{
"epoch": 1.2931390670870937,
"grad_norm": 0.439453125,
"learning_rate": 2.9455072463768118e-05,
"loss": 0.1653,
"step": 8920
},
{
"epoch": 1.2945888151933602,
"grad_norm": 0.40234375,
"learning_rate": 2.9397101449275362e-05,
"loss": 0.1764,
"step": 8930
},
{
"epoch": 1.2960385632996267,
"grad_norm": 0.4921875,
"learning_rate": 2.9339130434782612e-05,
"loss": 0.1619,
"step": 8940
},
{
"epoch": 1.2974883114058933,
"grad_norm": 0.53515625,
"learning_rate": 2.928115942028986e-05,
"loss": 0.1824,
"step": 8950
},
{
"epoch": 1.2989380595121598,
"grad_norm": 0.431640625,
"learning_rate": 2.9223188405797102e-05,
"loss": 0.1795,
"step": 8960
},
{
"epoch": 1.3003878076184263,
"grad_norm": 0.36328125,
"learning_rate": 2.9165217391304352e-05,
"loss": 0.1608,
"step": 8970
},
{
"epoch": 1.3018375557246928,
"grad_norm": 0.4296875,
"learning_rate": 2.9107246376811596e-05,
"loss": 0.1593,
"step": 8980
},
{
"epoch": 1.3032873038309594,
"grad_norm": 0.47265625,
"learning_rate": 2.9049275362318843e-05,
"loss": 0.1615,
"step": 8990
},
{
"epoch": 1.304737051937226,
"grad_norm": 0.43359375,
"learning_rate": 2.899130434782609e-05,
"loss": 0.1665,
"step": 9000
},
{
"epoch": 1.3061868000434924,
"grad_norm": 0.4375,
"learning_rate": 2.8933333333333336e-05,
"loss": 0.156,
"step": 9010
},
{
"epoch": 1.307636548149759,
"grad_norm": 0.45703125,
"learning_rate": 2.8875362318840583e-05,
"loss": 0.1627,
"step": 9020
},
{
"epoch": 1.3090862962560255,
"grad_norm": 0.40234375,
"learning_rate": 2.8817391304347827e-05,
"loss": 0.1768,
"step": 9030
},
{
"epoch": 1.310536044362292,
"grad_norm": 0.416015625,
"learning_rate": 2.8759420289855074e-05,
"loss": 0.166,
"step": 9040
},
{
"epoch": 1.3119857924685585,
"grad_norm": 0.39453125,
"learning_rate": 2.8701449275362324e-05,
"loss": 0.1779,
"step": 9050
},
{
"epoch": 1.313435540574825,
"grad_norm": 0.439453125,
"learning_rate": 2.8643478260869567e-05,
"loss": 0.1685,
"step": 9060
},
{
"epoch": 1.3148852886810918,
"grad_norm": 0.384765625,
"learning_rate": 2.8585507246376814e-05,
"loss": 0.1714,
"step": 9070
},
{
"epoch": 1.316335036787358,
"grad_norm": 0.412109375,
"learning_rate": 2.8527536231884057e-05,
"loss": 0.1635,
"step": 9080
},
{
"epoch": 1.3177847848936248,
"grad_norm": 0.484375,
"learning_rate": 2.8469565217391308e-05,
"loss": 0.1739,
"step": 9090
},
{
"epoch": 1.3192345329998914,
"grad_norm": 0.384765625,
"learning_rate": 2.8411594202898555e-05,
"loss": 0.1662,
"step": 9100
},
{
"epoch": 1.3206842811061579,
"grad_norm": 0.404296875,
"learning_rate": 2.8353623188405798e-05,
"loss": 0.1651,
"step": 9110
},
{
"epoch": 1.3221340292124244,
"grad_norm": 0.412109375,
"learning_rate": 2.8295652173913045e-05,
"loss": 0.1719,
"step": 9120
},
{
"epoch": 1.323583777318691,
"grad_norm": 0.447265625,
"learning_rate": 2.8237681159420295e-05,
"loss": 0.1607,
"step": 9130
},
{
"epoch": 1.3250335254249574,
"grad_norm": 0.443359375,
"learning_rate": 2.817971014492754e-05,
"loss": 0.1812,
"step": 9140
},
{
"epoch": 1.326483273531224,
"grad_norm": 0.38671875,
"learning_rate": 2.8121739130434785e-05,
"loss": 0.1608,
"step": 9150
},
{
"epoch": 1.3279330216374905,
"grad_norm": 0.361328125,
"learning_rate": 2.806376811594203e-05,
"loss": 0.1545,
"step": 9160
},
{
"epoch": 1.329382769743757,
"grad_norm": 0.373046875,
"learning_rate": 2.800579710144928e-05,
"loss": 0.172,
"step": 9170
},
{
"epoch": 1.3308325178500235,
"grad_norm": 0.384765625,
"learning_rate": 2.7947826086956526e-05,
"loss": 0.1538,
"step": 9180
},
{
"epoch": 1.33228226595629,
"grad_norm": 0.494140625,
"learning_rate": 2.788985507246377e-05,
"loss": 0.1626,
"step": 9190
},
{
"epoch": 1.3337320140625566,
"grad_norm": 0.423828125,
"learning_rate": 2.7831884057971016e-05,
"loss": 0.1723,
"step": 9200
},
{
"epoch": 1.3351817621688231,
"grad_norm": 0.458984375,
"learning_rate": 2.7773913043478263e-05,
"loss": 0.1839,
"step": 9210
},
{
"epoch": 1.3366315102750896,
"grad_norm": 0.388671875,
"learning_rate": 2.771594202898551e-05,
"loss": 0.1612,
"step": 9220
},
{
"epoch": 1.3380812583813562,
"grad_norm": 0.439453125,
"learning_rate": 2.7657971014492757e-05,
"loss": 0.1634,
"step": 9230
},
{
"epoch": 1.339531006487623,
"grad_norm": 0.39453125,
"learning_rate": 2.76e-05,
"loss": 0.1786,
"step": 9240
},
{
"epoch": 1.3409807545938892,
"grad_norm": 0.486328125,
"learning_rate": 2.754202898550725e-05,
"loss": 0.1759,
"step": 9250
},
{
"epoch": 1.342430502700156,
"grad_norm": 0.41015625,
"learning_rate": 2.7484057971014494e-05,
"loss": 0.1606,
"step": 9260
},
{
"epoch": 1.3438802508064223,
"grad_norm": 0.412109375,
"learning_rate": 2.742608695652174e-05,
"loss": 0.1611,
"step": 9270
},
{
"epoch": 1.345329998912689,
"grad_norm": 0.427734375,
"learning_rate": 2.736811594202899e-05,
"loss": 0.1762,
"step": 9280
},
{
"epoch": 1.3467797470189555,
"grad_norm": 0.408203125,
"learning_rate": 2.7310144927536234e-05,
"loss": 0.1643,
"step": 9290
},
{
"epoch": 1.348229495125222,
"grad_norm": 0.478515625,
"learning_rate": 2.725217391304348e-05,
"loss": 0.1741,
"step": 9300
},
{
"epoch": 1.3496792432314886,
"grad_norm": 0.400390625,
"learning_rate": 2.7194202898550724e-05,
"loss": 0.1671,
"step": 9310
},
{
"epoch": 1.351128991337755,
"grad_norm": 0.349609375,
"learning_rate": 2.7136231884057975e-05,
"loss": 0.1528,
"step": 9320
},
{
"epoch": 1.3525787394440216,
"grad_norm": 0.400390625,
"learning_rate": 2.707826086956522e-05,
"loss": 0.1443,
"step": 9330
},
{
"epoch": 1.3540284875502882,
"grad_norm": 0.404296875,
"learning_rate": 2.7020289855072465e-05,
"loss": 0.1662,
"step": 9340
},
{
"epoch": 1.3554782356565547,
"grad_norm": 0.37890625,
"learning_rate": 2.6962318840579712e-05,
"loss": 0.1662,
"step": 9350
},
{
"epoch": 1.3569279837628212,
"grad_norm": 0.66015625,
"learning_rate": 2.6904347826086962e-05,
"loss": 0.174,
"step": 9360
},
{
"epoch": 1.3583777318690877,
"grad_norm": 0.46875,
"learning_rate": 2.6846376811594205e-05,
"loss": 0.1701,
"step": 9370
},
{
"epoch": 1.3598274799753542,
"grad_norm": 0.35546875,
"learning_rate": 2.6788405797101452e-05,
"loss": 0.1501,
"step": 9380
},
{
"epoch": 1.3612772280816208,
"grad_norm": 0.404296875,
"learning_rate": 2.6730434782608696e-05,
"loss": 0.1789,
"step": 9390
},
{
"epoch": 1.3627269761878873,
"grad_norm": 0.443359375,
"learning_rate": 2.6672463768115946e-05,
"loss": 0.1577,
"step": 9400
},
{
"epoch": 1.3641767242941538,
"grad_norm": 0.40234375,
"learning_rate": 2.6614492753623193e-05,
"loss": 0.1787,
"step": 9410
},
{
"epoch": 1.3656264724004203,
"grad_norm": 0.46875,
"learning_rate": 2.6556521739130436e-05,
"loss": 0.1672,
"step": 9420
},
{
"epoch": 1.367076220506687,
"grad_norm": 0.39453125,
"learning_rate": 2.6498550724637683e-05,
"loss": 0.1587,
"step": 9430
},
{
"epoch": 1.3685259686129534,
"grad_norm": 0.49609375,
"learning_rate": 2.644057971014493e-05,
"loss": 0.1782,
"step": 9440
},
{
"epoch": 1.3699757167192201,
"grad_norm": 0.50390625,
"learning_rate": 2.6382608695652177e-05,
"loss": 0.1668,
"step": 9450
},
{
"epoch": 1.3714254648254864,
"grad_norm": 0.4453125,
"learning_rate": 2.632463768115942e-05,
"loss": 0.1661,
"step": 9460
},
{
"epoch": 1.3728752129317532,
"grad_norm": 0.36328125,
"learning_rate": 2.6266666666666667e-05,
"loss": 0.1557,
"step": 9470
},
{
"epoch": 1.3743249610380197,
"grad_norm": 0.4765625,
"learning_rate": 2.6208695652173917e-05,
"loss": 0.164,
"step": 9480
},
{
"epoch": 1.3757747091442862,
"grad_norm": 0.431640625,
"learning_rate": 2.615072463768116e-05,
"loss": 0.1644,
"step": 9490
},
{
"epoch": 1.3772244572505528,
"grad_norm": 0.421875,
"learning_rate": 2.6092753623188408e-05,
"loss": 0.182,
"step": 9500
},
{
"epoch": 1.3786742053568193,
"grad_norm": 0.412109375,
"learning_rate": 2.603478260869565e-05,
"loss": 0.1656,
"step": 9510
},
{
"epoch": 1.3801239534630858,
"grad_norm": 0.458984375,
"learning_rate": 2.59768115942029e-05,
"loss": 0.1654,
"step": 9520
},
{
"epoch": 1.3815737015693523,
"grad_norm": 0.435546875,
"learning_rate": 2.5918840579710148e-05,
"loss": 0.1672,
"step": 9530
},
{
"epoch": 1.3830234496756189,
"grad_norm": 0.44140625,
"learning_rate": 2.586086956521739e-05,
"loss": 0.1547,
"step": 9540
},
{
"epoch": 1.3844731977818854,
"grad_norm": 0.423828125,
"learning_rate": 2.580289855072464e-05,
"loss": 0.1678,
"step": 9550
},
{
"epoch": 1.385922945888152,
"grad_norm": 0.39453125,
"learning_rate": 2.574492753623189e-05,
"loss": 0.1614,
"step": 9560
},
{
"epoch": 1.3873726939944184,
"grad_norm": 0.388671875,
"learning_rate": 2.5686956521739132e-05,
"loss": 0.1545,
"step": 9570
},
{
"epoch": 1.388822442100685,
"grad_norm": 0.384765625,
"learning_rate": 2.562898550724638e-05,
"loss": 0.1701,
"step": 9580
},
{
"epoch": 1.3902721902069515,
"grad_norm": 0.42578125,
"learning_rate": 2.557101449275363e-05,
"loss": 0.173,
"step": 9590
},
{
"epoch": 1.391721938313218,
"grad_norm": 0.33984375,
"learning_rate": 2.5513043478260872e-05,
"loss": 0.1627,
"step": 9600
},
{
"epoch": 1.3931716864194845,
"grad_norm": 0.392578125,
"learning_rate": 2.545507246376812e-05,
"loss": 0.15,
"step": 9610
},
{
"epoch": 1.3946214345257513,
"grad_norm": 0.455078125,
"learning_rate": 2.5397101449275363e-05,
"loss": 0.1702,
"step": 9620
},
{
"epoch": 1.3960711826320176,
"grad_norm": 0.40625,
"learning_rate": 2.5339130434782613e-05,
"loss": 0.1537,
"step": 9630
},
{
"epoch": 1.3975209307382843,
"grad_norm": 0.390625,
"learning_rate": 2.5281159420289856e-05,
"loss": 0.1743,
"step": 9640
},
{
"epoch": 1.3989706788445508,
"grad_norm": 0.46875,
"learning_rate": 2.5223188405797103e-05,
"loss": 0.1688,
"step": 9650
},
{
"epoch": 1.4004204269508174,
"grad_norm": 0.41796875,
"learning_rate": 2.516521739130435e-05,
"loss": 0.1535,
"step": 9660
},
{
"epoch": 1.401870175057084,
"grad_norm": 0.451171875,
"learning_rate": 2.5107246376811597e-05,
"loss": 0.1507,
"step": 9670
},
{
"epoch": 1.4033199231633504,
"grad_norm": 0.482421875,
"learning_rate": 2.5049275362318844e-05,
"loss": 0.1601,
"step": 9680
},
{
"epoch": 1.404769671269617,
"grad_norm": 0.390625,
"learning_rate": 2.4991304347826087e-05,
"loss": 0.1667,
"step": 9690
},
{
"epoch": 1.4062194193758835,
"grad_norm": 0.48046875,
"learning_rate": 2.4933333333333334e-05,
"loss": 0.1815,
"step": 9700
},
{
"epoch": 1.40766916748215,
"grad_norm": 0.392578125,
"learning_rate": 2.4875362318840584e-05,
"loss": 0.1537,
"step": 9710
},
{
"epoch": 1.4091189155884165,
"grad_norm": 0.3515625,
"learning_rate": 2.4817391304347828e-05,
"loss": 0.1672,
"step": 9720
},
{
"epoch": 1.410568663694683,
"grad_norm": 0.478515625,
"learning_rate": 2.4759420289855075e-05,
"loss": 0.174,
"step": 9730
},
{
"epoch": 1.4120184118009496,
"grad_norm": 0.474609375,
"learning_rate": 2.4701449275362318e-05,
"loss": 0.1702,
"step": 9740
},
{
"epoch": 1.413468159907216,
"grad_norm": 0.4375,
"learning_rate": 2.4643478260869568e-05,
"loss": 0.1627,
"step": 9750
},
{
"epoch": 1.4149179080134826,
"grad_norm": 0.458984375,
"learning_rate": 2.4585507246376815e-05,
"loss": 0.1747,
"step": 9760
},
{
"epoch": 1.4163676561197491,
"grad_norm": 0.40625,
"learning_rate": 2.452753623188406e-05,
"loss": 0.1628,
"step": 9770
},
{
"epoch": 1.4178174042260157,
"grad_norm": 0.455078125,
"learning_rate": 2.4469565217391305e-05,
"loss": 0.1715,
"step": 9780
},
{
"epoch": 1.4192671523322824,
"grad_norm": 0.388671875,
"learning_rate": 2.4411594202898556e-05,
"loss": 0.145,
"step": 9790
},
{
"epoch": 1.4207169004385487,
"grad_norm": 0.45703125,
"learning_rate": 2.43536231884058e-05,
"loss": 0.1723,
"step": 9800
},
{
"epoch": 1.4221666485448154,
"grad_norm": 0.439453125,
"learning_rate": 2.4295652173913046e-05,
"loss": 0.1802,
"step": 9810
},
{
"epoch": 1.4236163966510818,
"grad_norm": 0.34765625,
"learning_rate": 2.423768115942029e-05,
"loss": 0.1656,
"step": 9820
},
{
"epoch": 1.4250661447573485,
"grad_norm": 0.4375,
"learning_rate": 2.417971014492754e-05,
"loss": 0.1641,
"step": 9830
},
{
"epoch": 1.426515892863615,
"grad_norm": 0.458984375,
"learning_rate": 2.4121739130434786e-05,
"loss": 0.1689,
"step": 9840
},
{
"epoch": 1.4279656409698815,
"grad_norm": 0.390625,
"learning_rate": 2.406376811594203e-05,
"loss": 0.1649,
"step": 9850
},
{
"epoch": 1.429415389076148,
"grad_norm": 0.455078125,
"learning_rate": 2.4005797101449277e-05,
"loss": 0.1557,
"step": 9860
},
{
"epoch": 1.4308651371824146,
"grad_norm": 0.45703125,
"learning_rate": 2.3947826086956523e-05,
"loss": 0.159,
"step": 9870
},
{
"epoch": 1.4323148852886811,
"grad_norm": 0.38671875,
"learning_rate": 2.388985507246377e-05,
"loss": 0.1632,
"step": 9880
},
{
"epoch": 1.4337646333949476,
"grad_norm": 0.392578125,
"learning_rate": 2.3831884057971014e-05,
"loss": 0.1571,
"step": 9890
},
{
"epoch": 1.4352143815012142,
"grad_norm": 0.361328125,
"learning_rate": 2.3773913043478264e-05,
"loss": 0.1583,
"step": 9900
},
{
"epoch": 1.4366641296074807,
"grad_norm": 0.396484375,
"learning_rate": 2.371594202898551e-05,
"loss": 0.168,
"step": 9910
},
{
"epoch": 1.4381138777137472,
"grad_norm": 0.416015625,
"learning_rate": 2.3657971014492754e-05,
"loss": 0.1754,
"step": 9920
},
{
"epoch": 1.4395636258200137,
"grad_norm": 0.416015625,
"learning_rate": 2.36e-05,
"loss": 0.1677,
"step": 9930
},
{
"epoch": 1.4410133739262803,
"grad_norm": 0.42578125,
"learning_rate": 2.354202898550725e-05,
"loss": 0.1593,
"step": 9940
},
{
"epoch": 1.4424631220325468,
"grad_norm": 0.455078125,
"learning_rate": 2.3484057971014495e-05,
"loss": 0.163,
"step": 9950
},
{
"epoch": 1.4439128701388133,
"grad_norm": 0.392578125,
"learning_rate": 2.342608695652174e-05,
"loss": 0.1653,
"step": 9960
},
{
"epoch": 1.4453626182450798,
"grad_norm": 0.515625,
"learning_rate": 2.3368115942028985e-05,
"loss": 0.1646,
"step": 9970
},
{
"epoch": 1.4468123663513466,
"grad_norm": 0.46484375,
"learning_rate": 2.3310144927536235e-05,
"loss": 0.1684,
"step": 9980
},
{
"epoch": 1.4482621144576129,
"grad_norm": 0.455078125,
"learning_rate": 2.3252173913043482e-05,
"loss": 0.1589,
"step": 9990
},
{
"epoch": 1.4497118625638796,
"grad_norm": 0.435546875,
"learning_rate": 2.3194202898550725e-05,
"loss": 0.1601,
"step": 10000
},
{
"epoch": 1.451161610670146,
"grad_norm": 0.388671875,
"learning_rate": 2.3136231884057972e-05,
"loss": 0.1691,
"step": 10010
},
{
"epoch": 1.4526113587764127,
"grad_norm": 0.388671875,
"learning_rate": 2.3078260869565223e-05,
"loss": 0.1694,
"step": 10020
},
{
"epoch": 1.4540611068826792,
"grad_norm": 0.3984375,
"learning_rate": 2.3020289855072466e-05,
"loss": 0.1718,
"step": 10030
},
{
"epoch": 1.4555108549889457,
"grad_norm": 0.40234375,
"learning_rate": 2.2962318840579713e-05,
"loss": 0.1715,
"step": 10040
},
{
"epoch": 1.4569606030952122,
"grad_norm": 0.3984375,
"learning_rate": 2.2904347826086956e-05,
"loss": 0.1755,
"step": 10050
},
{
"epoch": 1.4584103512014788,
"grad_norm": 0.37890625,
"learning_rate": 2.2846376811594206e-05,
"loss": 0.155,
"step": 10060
},
{
"epoch": 1.4598600993077453,
"grad_norm": 0.41015625,
"learning_rate": 2.278840579710145e-05,
"loss": 0.16,
"step": 10070
},
{
"epoch": 1.4613098474140118,
"grad_norm": 0.421875,
"learning_rate": 2.2730434782608697e-05,
"loss": 0.1684,
"step": 10080
},
{
"epoch": 1.4627595955202783,
"grad_norm": 0.400390625,
"learning_rate": 2.2672463768115944e-05,
"loss": 0.171,
"step": 10090
},
{
"epoch": 1.4642093436265449,
"grad_norm": 0.455078125,
"learning_rate": 2.261449275362319e-05,
"loss": 0.1503,
"step": 10100
},
{
"epoch": 1.4656590917328114,
"grad_norm": 0.5390625,
"learning_rate": 2.2556521739130437e-05,
"loss": 0.1619,
"step": 10110
},
{
"epoch": 1.467108839839078,
"grad_norm": 0.466796875,
"learning_rate": 2.249855072463768e-05,
"loss": 0.1551,
"step": 10120
},
{
"epoch": 1.4685585879453444,
"grad_norm": 0.50390625,
"learning_rate": 2.2440579710144928e-05,
"loss": 0.174,
"step": 10130
},
{
"epoch": 1.470008336051611,
"grad_norm": 0.4375,
"learning_rate": 2.2382608695652178e-05,
"loss": 0.1649,
"step": 10140
},
{
"epoch": 1.4714580841578775,
"grad_norm": 0.462890625,
"learning_rate": 2.232463768115942e-05,
"loss": 0.1524,
"step": 10150
},
{
"epoch": 1.472907832264144,
"grad_norm": 0.373046875,
"learning_rate": 2.2266666666666668e-05,
"loss": 0.1641,
"step": 10160
},
{
"epoch": 1.4743575803704108,
"grad_norm": 0.4609375,
"learning_rate": 2.220869565217391e-05,
"loss": 0.1689,
"step": 10170
},
{
"epoch": 1.475807328476677,
"grad_norm": 0.40625,
"learning_rate": 2.2150724637681162e-05,
"loss": 0.1495,
"step": 10180
},
{
"epoch": 1.4772570765829438,
"grad_norm": 0.400390625,
"learning_rate": 2.209275362318841e-05,
"loss": 0.1598,
"step": 10190
},
{
"epoch": 1.4787068246892103,
"grad_norm": 0.421875,
"learning_rate": 2.2034782608695652e-05,
"loss": 0.1583,
"step": 10200
},
{
"epoch": 1.4801565727954769,
"grad_norm": 0.4609375,
"learning_rate": 2.1976811594202902e-05,
"loss": 0.1721,
"step": 10210
},
{
"epoch": 1.4816063209017434,
"grad_norm": 0.4765625,
"learning_rate": 2.191884057971015e-05,
"loss": 0.1694,
"step": 10220
},
{
"epoch": 1.48305606900801,
"grad_norm": 0.419921875,
"learning_rate": 2.1860869565217393e-05,
"loss": 0.1676,
"step": 10230
},
{
"epoch": 1.4845058171142764,
"grad_norm": 0.44140625,
"learning_rate": 2.180289855072464e-05,
"loss": 0.1557,
"step": 10240
},
{
"epoch": 1.485955565220543,
"grad_norm": 0.408203125,
"learning_rate": 2.174492753623189e-05,
"loss": 0.167,
"step": 10250
},
{
"epoch": 1.4874053133268095,
"grad_norm": 0.353515625,
"learning_rate": 2.1686956521739133e-05,
"loss": 0.1706,
"step": 10260
},
{
"epoch": 1.488855061433076,
"grad_norm": 0.3671875,
"learning_rate": 2.162898550724638e-05,
"loss": 0.1546,
"step": 10270
},
{
"epoch": 1.4903048095393425,
"grad_norm": 0.404296875,
"learning_rate": 2.1571014492753623e-05,
"loss": 0.1689,
"step": 10280
},
{
"epoch": 1.491754557645609,
"grad_norm": 0.48046875,
"learning_rate": 2.1513043478260874e-05,
"loss": 0.169,
"step": 10290
},
{
"epoch": 1.4932043057518756,
"grad_norm": 0.455078125,
"learning_rate": 2.1455072463768117e-05,
"loss": 0.1765,
"step": 10300
},
{
"epoch": 1.494654053858142,
"grad_norm": 0.447265625,
"learning_rate": 2.1397101449275364e-05,
"loss": 0.1713,
"step": 10310
},
{
"epoch": 1.4961038019644086,
"grad_norm": 0.408203125,
"learning_rate": 2.133913043478261e-05,
"loss": 0.1612,
"step": 10320
},
{
"epoch": 1.4975535500706751,
"grad_norm": 0.4296875,
"learning_rate": 2.1281159420289857e-05,
"loss": 0.1753,
"step": 10330
},
{
"epoch": 1.499003298176942,
"grad_norm": 0.443359375,
"learning_rate": 2.1223188405797104e-05,
"loss": 0.1583,
"step": 10340
},
{
"epoch": 1.5004530462832082,
"grad_norm": 0.466796875,
"learning_rate": 2.1165217391304348e-05,
"loss": 0.1728,
"step": 10350
},
{
"epoch": 1.501902794389475,
"grad_norm": 0.376953125,
"learning_rate": 2.1107246376811595e-05,
"loss": 0.1703,
"step": 10360
},
{
"epoch": 1.5033525424957412,
"grad_norm": 0.37890625,
"learning_rate": 2.1049275362318845e-05,
"loss": 0.1625,
"step": 10370
},
{
"epoch": 1.504802290602008,
"grad_norm": 0.435546875,
"learning_rate": 2.0991304347826088e-05,
"loss": 0.1553,
"step": 10380
},
{
"epoch": 1.5062520387082743,
"grad_norm": 0.447265625,
"learning_rate": 2.0933333333333335e-05,
"loss": 0.1716,
"step": 10390
},
{
"epoch": 1.507701786814541,
"grad_norm": 0.40234375,
"learning_rate": 2.087536231884058e-05,
"loss": 0.1639,
"step": 10400
},
{
"epoch": 1.5091515349208076,
"grad_norm": 0.41796875,
"learning_rate": 2.081739130434783e-05,
"loss": 0.1523,
"step": 10410
},
{
"epoch": 1.510601283027074,
"grad_norm": 0.4609375,
"learning_rate": 2.0759420289855076e-05,
"loss": 0.1728,
"step": 10420
},
{
"epoch": 1.5120510311333406,
"grad_norm": 0.408203125,
"learning_rate": 2.070144927536232e-05,
"loss": 0.1717,
"step": 10430
},
{
"epoch": 1.5135007792396071,
"grad_norm": 0.435546875,
"learning_rate": 2.0643478260869566e-05,
"loss": 0.1745,
"step": 10440
},
{
"epoch": 1.5149505273458737,
"grad_norm": 0.4296875,
"learning_rate": 2.0585507246376816e-05,
"loss": 0.1633,
"step": 10450
},
{
"epoch": 1.5164002754521402,
"grad_norm": 0.3984375,
"learning_rate": 2.052753623188406e-05,
"loss": 0.1506,
"step": 10460
},
{
"epoch": 1.5178500235584067,
"grad_norm": 0.32421875,
"learning_rate": 2.0469565217391306e-05,
"loss": 0.1501,
"step": 10470
},
{
"epoch": 1.5192997716646732,
"grad_norm": 0.365234375,
"learning_rate": 2.041159420289855e-05,
"loss": 0.1684,
"step": 10480
},
{
"epoch": 1.5207495197709398,
"grad_norm": 0.41796875,
"learning_rate": 2.03536231884058e-05,
"loss": 0.1696,
"step": 10490
},
{
"epoch": 1.5221992678772063,
"grad_norm": 0.375,
"learning_rate": 2.0295652173913047e-05,
"loss": 0.1682,
"step": 10500
},
{
"epoch": 1.523649015983473,
"grad_norm": 0.4921875,
"learning_rate": 2.023768115942029e-05,
"loss": 0.1739,
"step": 10510
},
{
"epoch": 1.5250987640897393,
"grad_norm": 0.41015625,
"learning_rate": 2.017971014492754e-05,
"loss": 0.1676,
"step": 10520
},
{
"epoch": 1.526548512196006,
"grad_norm": 0.40234375,
"learning_rate": 2.0121739130434784e-05,
"loss": 0.1734,
"step": 10530
},
{
"epoch": 1.5279982603022724,
"grad_norm": 0.431640625,
"learning_rate": 2.006376811594203e-05,
"loss": 0.1573,
"step": 10540
},
{
"epoch": 1.5294480084085391,
"grad_norm": 0.4140625,
"learning_rate": 2.0005797101449274e-05,
"loss": 0.1741,
"step": 10550
},
{
"epoch": 1.5308977565148054,
"grad_norm": 0.392578125,
"learning_rate": 1.9947826086956524e-05,
"loss": 0.1689,
"step": 10560
},
{
"epoch": 1.5323475046210722,
"grad_norm": 0.484375,
"learning_rate": 1.988985507246377e-05,
"loss": 0.1775,
"step": 10570
},
{
"epoch": 1.5337972527273385,
"grad_norm": 0.388671875,
"learning_rate": 1.9831884057971015e-05,
"loss": 0.1664,
"step": 10580
},
{
"epoch": 1.5352470008336052,
"grad_norm": 0.3515625,
"learning_rate": 1.9773913043478265e-05,
"loss": 0.173,
"step": 10590
},
{
"epoch": 1.5366967489398717,
"grad_norm": 0.38671875,
"learning_rate": 1.971594202898551e-05,
"loss": 0.1725,
"step": 10600
},
{
"epoch": 1.5381464970461383,
"grad_norm": 0.3984375,
"learning_rate": 1.9657971014492755e-05,
"loss": 0.1647,
"step": 10610
},
{
"epoch": 1.5395962451524048,
"grad_norm": 0.390625,
"learning_rate": 1.9600000000000002e-05,
"loss": 0.1486,
"step": 10620
},
{
"epoch": 1.5410459932586713,
"grad_norm": 0.337890625,
"learning_rate": 1.954202898550725e-05,
"loss": 0.1613,
"step": 10630
},
{
"epoch": 1.5424957413649378,
"grad_norm": 0.4375,
"learning_rate": 1.9484057971014492e-05,
"loss": 0.167,
"step": 10640
},
{
"epoch": 1.5439454894712044,
"grad_norm": 0.443359375,
"learning_rate": 1.9426086956521743e-05,
"loss": 0.1582,
"step": 10650
},
{
"epoch": 1.5453952375774709,
"grad_norm": 0.44921875,
"learning_rate": 1.9368115942028986e-05,
"loss": 0.1573,
"step": 10660
},
{
"epoch": 1.5468449856837374,
"grad_norm": 0.466796875,
"learning_rate": 1.9310144927536233e-05,
"loss": 0.1674,
"step": 10670
},
{
"epoch": 1.5482947337900042,
"grad_norm": 0.380859375,
"learning_rate": 1.925217391304348e-05,
"loss": 0.1688,
"step": 10680
},
{
"epoch": 1.5497444818962705,
"grad_norm": 0.361328125,
"learning_rate": 1.9194202898550727e-05,
"loss": 0.1506,
"step": 10690
},
{
"epoch": 1.5511942300025372,
"grad_norm": 0.333984375,
"learning_rate": 1.9136231884057973e-05,
"loss": 0.1718,
"step": 10700
},
{
"epoch": 1.5526439781088035,
"grad_norm": 0.365234375,
"learning_rate": 1.907826086956522e-05,
"loss": 0.1664,
"step": 10710
},
{
"epoch": 1.5540937262150702,
"grad_norm": 0.451171875,
"learning_rate": 1.9020289855072464e-05,
"loss": 0.1588,
"step": 10720
},
{
"epoch": 1.5555434743213366,
"grad_norm": 0.453125,
"learning_rate": 1.896231884057971e-05,
"loss": 0.1767,
"step": 10730
},
{
"epoch": 1.5569932224276033,
"grad_norm": 0.4453125,
"learning_rate": 1.8904347826086957e-05,
"loss": 0.1636,
"step": 10740
},
{
"epoch": 1.5584429705338696,
"grad_norm": 0.359375,
"learning_rate": 1.8846376811594204e-05,
"loss": 0.1524,
"step": 10750
},
{
"epoch": 1.5598927186401363,
"grad_norm": 0.341796875,
"learning_rate": 1.878840579710145e-05,
"loss": 0.1394,
"step": 10760
},
{
"epoch": 1.5613424667464026,
"grad_norm": 0.431640625,
"learning_rate": 1.8730434782608698e-05,
"loss": 0.1804,
"step": 10770
},
{
"epoch": 1.5627922148526694,
"grad_norm": 0.390625,
"learning_rate": 1.867246376811594e-05,
"loss": 0.1591,
"step": 10780
},
{
"epoch": 1.564241962958936,
"grad_norm": 0.45703125,
"learning_rate": 1.861449275362319e-05,
"loss": 0.162,
"step": 10790
},
{
"epoch": 1.5656917110652024,
"grad_norm": 0.39453125,
"learning_rate": 1.8556521739130435e-05,
"loss": 0.1959,
"step": 10800
},
{
"epoch": 1.567141459171469,
"grad_norm": 0.435546875,
"learning_rate": 1.8498550724637682e-05,
"loss": 0.1571,
"step": 10810
},
{
"epoch": 1.5685912072777355,
"grad_norm": 0.41796875,
"learning_rate": 1.844057971014493e-05,
"loss": 0.1801,
"step": 10820
},
{
"epoch": 1.570040955384002,
"grad_norm": 0.34375,
"learning_rate": 1.8382608695652175e-05,
"loss": 0.154,
"step": 10830
},
{
"epoch": 1.5714907034902685,
"grad_norm": 0.4453125,
"learning_rate": 1.8324637681159422e-05,
"loss": 0.1488,
"step": 10840
},
{
"epoch": 1.572940451596535,
"grad_norm": 0.43359375,
"learning_rate": 1.826666666666667e-05,
"loss": 0.1539,
"step": 10850
},
{
"epoch": 1.5743901997028016,
"grad_norm": 0.404296875,
"learning_rate": 1.8208695652173916e-05,
"loss": 0.1667,
"step": 10860
},
{
"epoch": 1.5758399478090683,
"grad_norm": 0.36328125,
"learning_rate": 1.815072463768116e-05,
"loss": 0.1624,
"step": 10870
},
{
"epoch": 1.5772896959153346,
"grad_norm": 0.388671875,
"learning_rate": 1.809275362318841e-05,
"loss": 0.1503,
"step": 10880
},
{
"epoch": 1.5787394440216014,
"grad_norm": 0.359375,
"learning_rate": 1.8034782608695653e-05,
"loss": 0.1643,
"step": 10890
},
{
"epoch": 1.5801891921278677,
"grad_norm": 0.38671875,
"learning_rate": 1.79768115942029e-05,
"loss": 0.156,
"step": 10900
},
{
"epoch": 1.5816389402341344,
"grad_norm": 0.4296875,
"learning_rate": 1.7918840579710147e-05,
"loss": 0.1632,
"step": 10910
},
{
"epoch": 1.5830886883404007,
"grad_norm": 0.44140625,
"learning_rate": 1.7860869565217394e-05,
"loss": 0.1586,
"step": 10920
},
{
"epoch": 1.5845384364466675,
"grad_norm": 0.400390625,
"learning_rate": 1.780289855072464e-05,
"loss": 0.1633,
"step": 10930
},
{
"epoch": 1.5859881845529338,
"grad_norm": 0.359375,
"learning_rate": 1.7744927536231887e-05,
"loss": 0.1593,
"step": 10940
},
{
"epoch": 1.5874379326592005,
"grad_norm": 0.431640625,
"learning_rate": 1.768695652173913e-05,
"loss": 0.1715,
"step": 10950
},
{
"epoch": 1.588887680765467,
"grad_norm": 0.427734375,
"learning_rate": 1.7628985507246377e-05,
"loss": 0.1673,
"step": 10960
},
{
"epoch": 1.5903374288717336,
"grad_norm": 0.400390625,
"learning_rate": 1.7571014492753624e-05,
"loss": 0.1556,
"step": 10970
},
{
"epoch": 1.591787176978,
"grad_norm": 0.4609375,
"learning_rate": 1.751304347826087e-05,
"loss": 0.1662,
"step": 10980
},
{
"epoch": 1.5932369250842666,
"grad_norm": 0.455078125,
"learning_rate": 1.7455072463768118e-05,
"loss": 0.1735,
"step": 10990
},
{
"epoch": 1.5946866731905331,
"grad_norm": 0.40625,
"learning_rate": 1.7397101449275365e-05,
"loss": 0.1597,
"step": 11000
},
{
"epoch": 1.5961364212967997,
"grad_norm": 0.4453125,
"learning_rate": 1.7339130434782608e-05,
"loss": 0.1893,
"step": 11010
},
{
"epoch": 1.5975861694030662,
"grad_norm": 0.40625,
"learning_rate": 1.728115942028986e-05,
"loss": 0.1653,
"step": 11020
},
{
"epoch": 1.5990359175093327,
"grad_norm": 0.41015625,
"learning_rate": 1.7223188405797102e-05,
"loss": 0.1702,
"step": 11030
},
{
"epoch": 1.6004856656155992,
"grad_norm": 0.439453125,
"learning_rate": 1.716521739130435e-05,
"loss": 0.1622,
"step": 11040
},
{
"epoch": 1.6019354137218658,
"grad_norm": 0.400390625,
"learning_rate": 1.7107246376811596e-05,
"loss": 0.1657,
"step": 11050
},
{
"epoch": 1.6033851618281325,
"grad_norm": 0.396484375,
"learning_rate": 1.7049275362318842e-05,
"loss": 0.1636,
"step": 11060
},
{
"epoch": 1.6048349099343988,
"grad_norm": 0.439453125,
"learning_rate": 1.6991304347826086e-05,
"loss": 0.1576,
"step": 11070
},
{
"epoch": 1.6062846580406656,
"grad_norm": 0.419921875,
"learning_rate": 1.6933333333333336e-05,
"loss": 0.17,
"step": 11080
},
{
"epoch": 1.6077344061469319,
"grad_norm": 0.435546875,
"learning_rate": 1.687536231884058e-05,
"loss": 0.149,
"step": 11090
},
{
"epoch": 1.6091841542531986,
"grad_norm": 0.416015625,
"learning_rate": 1.6817391304347826e-05,
"loss": 0.1623,
"step": 11100
},
{
"epoch": 1.610633902359465,
"grad_norm": 0.41796875,
"learning_rate": 1.6759420289855073e-05,
"loss": 0.1643,
"step": 11110
},
{
"epoch": 1.6120836504657317,
"grad_norm": 0.421875,
"learning_rate": 1.670144927536232e-05,
"loss": 0.1624,
"step": 11120
},
{
"epoch": 1.613533398571998,
"grad_norm": 0.458984375,
"learning_rate": 1.6643478260869567e-05,
"loss": 0.1622,
"step": 11130
},
{
"epoch": 1.6149831466782647,
"grad_norm": 0.408203125,
"learning_rate": 1.6585507246376814e-05,
"loss": 0.1689,
"step": 11140
},
{
"epoch": 1.6164328947845312,
"grad_norm": 0.40234375,
"learning_rate": 1.652753623188406e-05,
"loss": 0.1643,
"step": 11150
},
{
"epoch": 1.6178826428907978,
"grad_norm": 0.419921875,
"learning_rate": 1.6469565217391304e-05,
"loss": 0.1622,
"step": 11160
},
{
"epoch": 1.6193323909970643,
"grad_norm": 0.33984375,
"learning_rate": 1.6411594202898554e-05,
"loss": 0.156,
"step": 11170
},
{
"epoch": 1.6207821391033308,
"grad_norm": 0.447265625,
"learning_rate": 1.6353623188405798e-05,
"loss": 0.1586,
"step": 11180
},
{
"epoch": 1.6222318872095973,
"grad_norm": 0.466796875,
"learning_rate": 1.6295652173913044e-05,
"loss": 0.1556,
"step": 11190
},
{
"epoch": 1.6236816353158638,
"grad_norm": 0.451171875,
"learning_rate": 1.623768115942029e-05,
"loss": 0.1556,
"step": 11200
},
{
"epoch": 1.6251313834221304,
"grad_norm": 0.423828125,
"learning_rate": 1.6179710144927538e-05,
"loss": 0.1622,
"step": 11210
},
{
"epoch": 1.626581131528397,
"grad_norm": 0.466796875,
"learning_rate": 1.6121739130434785e-05,
"loss": 0.1734,
"step": 11220
},
{
"epoch": 1.6280308796346636,
"grad_norm": 0.361328125,
"learning_rate": 1.6063768115942032e-05,
"loss": 0.1546,
"step": 11230
},
{
"epoch": 1.62948062774093,
"grad_norm": 0.51171875,
"learning_rate": 1.6005797101449275e-05,
"loss": 0.1653,
"step": 11240
},
{
"epoch": 1.6309303758471967,
"grad_norm": 0.4453125,
"learning_rate": 1.5947826086956522e-05,
"loss": 0.1666,
"step": 11250
},
{
"epoch": 1.632380123953463,
"grad_norm": 0.427734375,
"learning_rate": 1.588985507246377e-05,
"loss": 0.1611,
"step": 11260
},
{
"epoch": 1.6338298720597297,
"grad_norm": 0.439453125,
"learning_rate": 1.5831884057971016e-05,
"loss": 0.1686,
"step": 11270
},
{
"epoch": 1.635279620165996,
"grad_norm": 0.376953125,
"learning_rate": 1.5773913043478263e-05,
"loss": 0.1753,
"step": 11280
},
{
"epoch": 1.6367293682722628,
"grad_norm": 0.390625,
"learning_rate": 1.571594202898551e-05,
"loss": 0.1641,
"step": 11290
},
{
"epoch": 1.638179116378529,
"grad_norm": 0.4609375,
"learning_rate": 1.5657971014492753e-05,
"loss": 0.1592,
"step": 11300
},
{
"epoch": 1.6396288644847958,
"grad_norm": 0.3984375,
"learning_rate": 1.5600000000000003e-05,
"loss": 0.1613,
"step": 11310
},
{
"epoch": 1.6410786125910624,
"grad_norm": 0.400390625,
"learning_rate": 1.5542028985507247e-05,
"loss": 0.1674,
"step": 11320
},
{
"epoch": 1.6425283606973289,
"grad_norm": 0.486328125,
"learning_rate": 1.5484057971014493e-05,
"loss": 0.1746,
"step": 11330
},
{
"epoch": 1.6439781088035954,
"grad_norm": 0.369140625,
"learning_rate": 1.542608695652174e-05,
"loss": 0.1702,
"step": 11340
},
{
"epoch": 1.645427856909862,
"grad_norm": 0.38671875,
"learning_rate": 1.5368115942028987e-05,
"loss": 0.1614,
"step": 11350
},
{
"epoch": 1.6468776050161285,
"grad_norm": 0.435546875,
"learning_rate": 1.5310144927536234e-05,
"loss": 0.1659,
"step": 11360
},
{
"epoch": 1.648327353122395,
"grad_norm": 0.388671875,
"learning_rate": 1.5252173913043479e-05,
"loss": 0.165,
"step": 11370
},
{
"epoch": 1.6497771012286615,
"grad_norm": 0.427734375,
"learning_rate": 1.5194202898550726e-05,
"loss": 0.1636,
"step": 11380
},
{
"epoch": 1.651226849334928,
"grad_norm": 0.5390625,
"learning_rate": 1.5136231884057973e-05,
"loss": 0.1574,
"step": 11390
},
{
"epoch": 1.6526765974411946,
"grad_norm": 0.4296875,
"learning_rate": 1.5078260869565218e-05,
"loss": 0.1674,
"step": 11400
},
{
"epoch": 1.654126345547461,
"grad_norm": 0.431640625,
"learning_rate": 1.5020289855072465e-05,
"loss": 0.1605,
"step": 11410
},
{
"epoch": 1.6555760936537278,
"grad_norm": 0.42578125,
"learning_rate": 1.496231884057971e-05,
"loss": 0.1583,
"step": 11420
},
{
"epoch": 1.6570258417599941,
"grad_norm": 0.39453125,
"learning_rate": 1.4904347826086958e-05,
"loss": 0.1645,
"step": 11430
},
{
"epoch": 1.6584755898662609,
"grad_norm": 0.390625,
"learning_rate": 1.4846376811594203e-05,
"loss": 0.1608,
"step": 11440
},
{
"epoch": 1.6599253379725272,
"grad_norm": 0.421875,
"learning_rate": 1.478840579710145e-05,
"loss": 0.1737,
"step": 11450
},
{
"epoch": 1.661375086078794,
"grad_norm": 0.357421875,
"learning_rate": 1.4730434782608695e-05,
"loss": 0.1739,
"step": 11460
},
{
"epoch": 1.6628248341850602,
"grad_norm": 0.447265625,
"learning_rate": 1.4672463768115944e-05,
"loss": 0.1671,
"step": 11470
},
{
"epoch": 1.664274582291327,
"grad_norm": 0.3984375,
"learning_rate": 1.461449275362319e-05,
"loss": 0.1643,
"step": 11480
},
{
"epoch": 1.6657243303975933,
"grad_norm": 0.41796875,
"learning_rate": 1.4556521739130436e-05,
"loss": 0.1517,
"step": 11490
},
{
"epoch": 1.66717407850386,
"grad_norm": 0.45703125,
"learning_rate": 1.4498550724637683e-05,
"loss": 0.157,
"step": 11500
},
{
"epoch": 1.6686238266101265,
"grad_norm": 0.404296875,
"learning_rate": 1.4440579710144928e-05,
"loss": 0.1605,
"step": 11510
},
{
"epoch": 1.670073574716393,
"grad_norm": 0.412109375,
"learning_rate": 1.4382608695652176e-05,
"loss": 0.1696,
"step": 11520
},
{
"epoch": 1.6715233228226596,
"grad_norm": 0.462890625,
"learning_rate": 1.4324637681159422e-05,
"loss": 0.1732,
"step": 11530
},
{
"epoch": 1.672973070928926,
"grad_norm": 0.423828125,
"learning_rate": 1.4266666666666668e-05,
"loss": 0.1576,
"step": 11540
},
{
"epoch": 1.6744228190351926,
"grad_norm": 0.392578125,
"learning_rate": 1.4208695652173914e-05,
"loss": 0.1687,
"step": 11550
},
{
"epoch": 1.6758725671414592,
"grad_norm": 0.455078125,
"learning_rate": 1.4150724637681162e-05,
"loss": 0.1701,
"step": 11560
},
{
"epoch": 1.6773223152477257,
"grad_norm": 0.3671875,
"learning_rate": 1.4092753623188407e-05,
"loss": 0.1689,
"step": 11570
},
{
"epoch": 1.6787720633539922,
"grad_norm": 0.474609375,
"learning_rate": 1.4034782608695654e-05,
"loss": 0.1589,
"step": 11580
},
{
"epoch": 1.6802218114602587,
"grad_norm": 0.38671875,
"learning_rate": 1.39768115942029e-05,
"loss": 0.1494,
"step": 11590
},
{
"epoch": 1.6816715595665253,
"grad_norm": 0.470703125,
"learning_rate": 1.3918840579710146e-05,
"loss": 0.1483,
"step": 11600
},
{
"epoch": 1.683121307672792,
"grad_norm": 0.439453125,
"learning_rate": 1.3860869565217391e-05,
"loss": 0.1697,
"step": 11610
},
{
"epoch": 1.6845710557790583,
"grad_norm": 0.396484375,
"learning_rate": 1.380289855072464e-05,
"loss": 0.1488,
"step": 11620
},
{
"epoch": 1.686020803885325,
"grad_norm": 0.5234375,
"learning_rate": 1.3744927536231885e-05,
"loss": 0.1737,
"step": 11630
},
{
"epoch": 1.6874705519915913,
"grad_norm": 0.423828125,
"learning_rate": 1.3686956521739132e-05,
"loss": 0.1546,
"step": 11640
},
{
"epoch": 1.688920300097858,
"grad_norm": 0.4609375,
"learning_rate": 1.3628985507246377e-05,
"loss": 0.1612,
"step": 11650
},
{
"epoch": 1.6903700482041244,
"grad_norm": 0.380859375,
"learning_rate": 1.3571014492753625e-05,
"loss": 0.1667,
"step": 11660
},
{
"epoch": 1.6918197963103911,
"grad_norm": 0.48828125,
"learning_rate": 1.351304347826087e-05,
"loss": 0.1644,
"step": 11670
},
{
"epoch": 1.6932695444166574,
"grad_norm": 0.341796875,
"learning_rate": 1.3455072463768117e-05,
"loss": 0.1651,
"step": 11680
},
{
"epoch": 1.6947192925229242,
"grad_norm": 0.478515625,
"learning_rate": 1.3397101449275362e-05,
"loss": 0.1591,
"step": 11690
},
{
"epoch": 1.6961690406291907,
"grad_norm": 0.390625,
"learning_rate": 1.333913043478261e-05,
"loss": 0.158,
"step": 11700
},
{
"epoch": 1.6976187887354572,
"grad_norm": 0.388671875,
"learning_rate": 1.3281159420289856e-05,
"loss": 0.16,
"step": 11710
},
{
"epoch": 1.6990685368417238,
"grad_norm": 0.44140625,
"learning_rate": 1.3223188405797103e-05,
"loss": 0.1677,
"step": 11720
},
{
"epoch": 1.7005182849479903,
"grad_norm": 0.408203125,
"learning_rate": 1.3165217391304348e-05,
"loss": 0.1721,
"step": 11730
},
{
"epoch": 1.7019680330542568,
"grad_norm": 0.40625,
"learning_rate": 1.3107246376811595e-05,
"loss": 0.1553,
"step": 11740
},
{
"epoch": 1.7034177811605233,
"grad_norm": 0.43359375,
"learning_rate": 1.304927536231884e-05,
"loss": 0.1687,
"step": 11750
},
{
"epoch": 1.7048675292667899,
"grad_norm": 0.373046875,
"learning_rate": 1.2991304347826089e-05,
"loss": 0.157,
"step": 11760
},
{
"epoch": 1.7063172773730564,
"grad_norm": 0.443359375,
"learning_rate": 1.2933333333333334e-05,
"loss": 0.164,
"step": 11770
},
{
"epoch": 1.7077670254793231,
"grad_norm": 0.419921875,
"learning_rate": 1.287536231884058e-05,
"loss": 0.1438,
"step": 11780
},
{
"epoch": 1.7092167735855894,
"grad_norm": 0.423828125,
"learning_rate": 1.2817391304347827e-05,
"loss": 0.1627,
"step": 11790
},
{
"epoch": 1.7106665216918562,
"grad_norm": 0.4140625,
"learning_rate": 1.2759420289855074e-05,
"loss": 0.1566,
"step": 11800
},
{
"epoch": 1.7121162697981225,
"grad_norm": 0.388671875,
"learning_rate": 1.2701449275362321e-05,
"loss": 0.1527,
"step": 11810
},
{
"epoch": 1.7135660179043892,
"grad_norm": 0.40234375,
"learning_rate": 1.2643478260869566e-05,
"loss": 0.1421,
"step": 11820
},
{
"epoch": 1.7150157660106555,
"grad_norm": 0.4140625,
"learning_rate": 1.2585507246376813e-05,
"loss": 0.1601,
"step": 11830
},
{
"epoch": 1.7164655141169223,
"grad_norm": 0.443359375,
"learning_rate": 1.2527536231884058e-05,
"loss": 0.1553,
"step": 11840
},
{
"epoch": 1.7179152622231886,
"grad_norm": 0.419921875,
"learning_rate": 1.2469565217391307e-05,
"loss": 0.1658,
"step": 11850
},
{
"epoch": 1.7193650103294553,
"grad_norm": 0.39453125,
"learning_rate": 1.2411594202898552e-05,
"loss": 0.1658,
"step": 11860
},
{
"epoch": 1.7208147584357218,
"grad_norm": 0.373046875,
"learning_rate": 1.2353623188405799e-05,
"loss": 0.1683,
"step": 11870
},
{
"epoch": 1.7222645065419884,
"grad_norm": 0.34375,
"learning_rate": 1.2295652173913044e-05,
"loss": 0.1608,
"step": 11880
},
{
"epoch": 1.723714254648255,
"grad_norm": 0.41796875,
"learning_rate": 1.2237681159420292e-05,
"loss": 0.1641,
"step": 11890
},
{
"epoch": 1.7251640027545214,
"grad_norm": 0.44921875,
"learning_rate": 1.2179710144927537e-05,
"loss": 0.1563,
"step": 11900
},
{
"epoch": 1.726613750860788,
"grad_norm": 0.4375,
"learning_rate": 1.2121739130434784e-05,
"loss": 0.1753,
"step": 11910
},
{
"epoch": 1.7280634989670545,
"grad_norm": 0.4453125,
"learning_rate": 1.206376811594203e-05,
"loss": 0.1598,
"step": 11920
},
{
"epoch": 1.729513247073321,
"grad_norm": 0.384765625,
"learning_rate": 1.2005797101449276e-05,
"loss": 0.1582,
"step": 11930
},
{
"epoch": 1.7309629951795875,
"grad_norm": 0.4140625,
"learning_rate": 1.1947826086956521e-05,
"loss": 0.1698,
"step": 11940
},
{
"epoch": 1.732412743285854,
"grad_norm": 0.34765625,
"learning_rate": 1.188985507246377e-05,
"loss": 0.1577,
"step": 11950
},
{
"epoch": 1.7338624913921206,
"grad_norm": 0.46484375,
"learning_rate": 1.1831884057971015e-05,
"loss": 0.1585,
"step": 11960
},
{
"epoch": 1.7353122394983873,
"grad_norm": 0.40625,
"learning_rate": 1.1773913043478262e-05,
"loss": 0.1527,
"step": 11970
},
{
"epoch": 1.7367619876046536,
"grad_norm": 0.3984375,
"learning_rate": 1.1715942028985507e-05,
"loss": 0.1724,
"step": 11980
},
{
"epoch": 1.7382117357109204,
"grad_norm": 0.392578125,
"learning_rate": 1.1657971014492756e-05,
"loss": 0.1617,
"step": 11990
},
{
"epoch": 1.7396614838171867,
"grad_norm": 0.419921875,
"learning_rate": 1.16e-05,
"loss": 0.1632,
"step": 12000
},
{
"epoch": 1.7411112319234534,
"grad_norm": 0.39453125,
"learning_rate": 1.1542028985507248e-05,
"loss": 0.1619,
"step": 12010
},
{
"epoch": 1.7425609800297197,
"grad_norm": 0.416015625,
"learning_rate": 1.1484057971014493e-05,
"loss": 0.1579,
"step": 12020
},
{
"epoch": 1.7440107281359865,
"grad_norm": 0.34765625,
"learning_rate": 1.142608695652174e-05,
"loss": 0.1433,
"step": 12030
},
{
"epoch": 1.7454604762422528,
"grad_norm": 0.431640625,
"learning_rate": 1.1368115942028985e-05,
"loss": 0.1523,
"step": 12040
},
{
"epoch": 1.7469102243485195,
"grad_norm": 0.408203125,
"learning_rate": 1.1310144927536233e-05,
"loss": 0.1653,
"step": 12050
},
{
"epoch": 1.748359972454786,
"grad_norm": 0.421875,
"learning_rate": 1.1252173913043478e-05,
"loss": 0.168,
"step": 12060
},
{
"epoch": 1.7498097205610526,
"grad_norm": 0.458984375,
"learning_rate": 1.1194202898550725e-05,
"loss": 0.1732,
"step": 12070
},
{
"epoch": 1.751259468667319,
"grad_norm": 0.396484375,
"learning_rate": 1.113623188405797e-05,
"loss": 0.1631,
"step": 12080
},
{
"epoch": 1.7527092167735856,
"grad_norm": 0.431640625,
"learning_rate": 1.1078260869565219e-05,
"loss": 0.1585,
"step": 12090
},
{
"epoch": 1.7541589648798521,
"grad_norm": 0.50390625,
"learning_rate": 1.1020289855072466e-05,
"loss": 0.1796,
"step": 12100
},
{
"epoch": 1.7556087129861186,
"grad_norm": 0.37890625,
"learning_rate": 1.096231884057971e-05,
"loss": 0.1707,
"step": 12110
},
{
"epoch": 1.7570584610923852,
"grad_norm": 0.478515625,
"learning_rate": 1.0904347826086958e-05,
"loss": 0.1723,
"step": 12120
},
{
"epoch": 1.7585082091986517,
"grad_norm": 0.37109375,
"learning_rate": 1.0846376811594203e-05,
"loss": 0.1527,
"step": 12130
},
{
"epoch": 1.7599579573049182,
"grad_norm": 0.4453125,
"learning_rate": 1.0788405797101451e-05,
"loss": 0.1629,
"step": 12140
},
{
"epoch": 1.7614077054111847,
"grad_norm": 0.435546875,
"learning_rate": 1.0730434782608696e-05,
"loss": 0.1634,
"step": 12150
},
{
"epoch": 1.7628574535174515,
"grad_norm": 0.439453125,
"learning_rate": 1.0672463768115943e-05,
"loss": 0.1597,
"step": 12160
},
{
"epoch": 1.7643072016237178,
"grad_norm": 0.40625,
"learning_rate": 1.0614492753623188e-05,
"loss": 0.1778,
"step": 12170
},
{
"epoch": 1.7657569497299845,
"grad_norm": 0.2890625,
"learning_rate": 1.0556521739130437e-05,
"loss": 0.1666,
"step": 12180
},
{
"epoch": 1.7672066978362508,
"grad_norm": 0.353515625,
"learning_rate": 1.0498550724637682e-05,
"loss": 0.1567,
"step": 12190
},
{
"epoch": 1.7686564459425176,
"grad_norm": 0.421875,
"learning_rate": 1.0440579710144929e-05,
"loss": 0.1505,
"step": 12200
},
{
"epoch": 1.7701061940487839,
"grad_norm": 0.404296875,
"learning_rate": 1.0382608695652174e-05,
"loss": 0.1735,
"step": 12210
},
{
"epoch": 1.7715559421550506,
"grad_norm": 0.361328125,
"learning_rate": 1.0324637681159423e-05,
"loss": 0.169,
"step": 12220
},
{
"epoch": 1.773005690261317,
"grad_norm": 0.453125,
"learning_rate": 1.0266666666666668e-05,
"loss": 0.1534,
"step": 12230
},
{
"epoch": 1.7744554383675837,
"grad_norm": 0.427734375,
"learning_rate": 1.0208695652173915e-05,
"loss": 0.1584,
"step": 12240
},
{
"epoch": 1.7759051864738502,
"grad_norm": 0.41796875,
"learning_rate": 1.015072463768116e-05,
"loss": 0.1512,
"step": 12250
},
{
"epoch": 1.7773549345801167,
"grad_norm": 0.388671875,
"learning_rate": 1.0092753623188407e-05,
"loss": 0.1759,
"step": 12260
},
{
"epoch": 1.7788046826863833,
"grad_norm": 0.40625,
"learning_rate": 1.0034782608695652e-05,
"loss": 0.1634,
"step": 12270
},
{
"epoch": 1.7802544307926498,
"grad_norm": 0.427734375,
"learning_rate": 9.9768115942029e-06,
"loss": 0.1691,
"step": 12280
},
{
"epoch": 1.7817041788989163,
"grad_norm": 0.416015625,
"learning_rate": 9.918840579710145e-06,
"loss": 0.1578,
"step": 12290
},
{
"epoch": 1.7831539270051828,
"grad_norm": 0.4765625,
"learning_rate": 9.860869565217392e-06,
"loss": 0.1692,
"step": 12300
},
{
"epoch": 1.7846036751114494,
"grad_norm": 0.384765625,
"learning_rate": 9.802898550724639e-06,
"loss": 0.1765,
"step": 12310
},
{
"epoch": 1.7860534232177159,
"grad_norm": 0.380859375,
"learning_rate": 9.744927536231886e-06,
"loss": 0.154,
"step": 12320
},
{
"epoch": 1.7875031713239826,
"grad_norm": 0.419921875,
"learning_rate": 9.686956521739131e-06,
"loss": 0.1471,
"step": 12330
},
{
"epoch": 1.788952919430249,
"grad_norm": 0.53125,
"learning_rate": 9.628985507246378e-06,
"loss": 0.1638,
"step": 12340
},
{
"epoch": 1.7904026675365157,
"grad_norm": 0.376953125,
"learning_rate": 9.571014492753625e-06,
"loss": 0.1617,
"step": 12350
},
{
"epoch": 1.791852415642782,
"grad_norm": 0.4609375,
"learning_rate": 9.51304347826087e-06,
"loss": 0.1427,
"step": 12360
},
{
"epoch": 1.7933021637490487,
"grad_norm": 0.46875,
"learning_rate": 9.455072463768117e-06,
"loss": 0.1817,
"step": 12370
},
{
"epoch": 1.794751911855315,
"grad_norm": 0.4921875,
"learning_rate": 9.397101449275363e-06,
"loss": 0.1754,
"step": 12380
},
{
"epoch": 1.7962016599615818,
"grad_norm": 0.427734375,
"learning_rate": 9.33913043478261e-06,
"loss": 0.1667,
"step": 12390
},
{
"epoch": 1.797651408067848,
"grad_norm": 0.3828125,
"learning_rate": 9.281159420289855e-06,
"loss": 0.1713,
"step": 12400
},
{
"epoch": 1.7991011561741148,
"grad_norm": 0.3671875,
"learning_rate": 9.223188405797102e-06,
"loss": 0.158,
"step": 12410
},
{
"epoch": 1.8005509042803813,
"grad_norm": 0.38671875,
"learning_rate": 9.165217391304349e-06,
"loss": 0.1636,
"step": 12420
},
{
"epoch": 1.8020006523866479,
"grad_norm": 0.453125,
"learning_rate": 9.107246376811594e-06,
"loss": 0.1496,
"step": 12430
},
{
"epoch": 1.8034504004929144,
"grad_norm": 0.4375,
"learning_rate": 9.049275362318841e-06,
"loss": 0.1646,
"step": 12440
},
{
"epoch": 1.804900148599181,
"grad_norm": 0.408203125,
"learning_rate": 8.991304347826088e-06,
"loss": 0.1568,
"step": 12450
},
{
"epoch": 1.8063498967054474,
"grad_norm": 0.3984375,
"learning_rate": 8.933333333333333e-06,
"loss": 0.171,
"step": 12460
},
{
"epoch": 1.807799644811714,
"grad_norm": 0.384765625,
"learning_rate": 8.87536231884058e-06,
"loss": 0.1708,
"step": 12470
},
{
"epoch": 1.8092493929179805,
"grad_norm": 0.470703125,
"learning_rate": 8.817391304347827e-06,
"loss": 0.1695,
"step": 12480
},
{
"epoch": 1.810699141024247,
"grad_norm": 0.373046875,
"learning_rate": 8.759420289855074e-06,
"loss": 0.1614,
"step": 12490
},
{
"epoch": 1.8121488891305135,
"grad_norm": 0.4140625,
"learning_rate": 8.701449275362319e-06,
"loss": 0.1735,
"step": 12500
},
{
"epoch": 1.81359863723678,
"grad_norm": 0.482421875,
"learning_rate": 8.643478260869566e-06,
"loss": 0.1518,
"step": 12510
},
{
"epoch": 1.8150483853430468,
"grad_norm": 0.396484375,
"learning_rate": 8.585507246376812e-06,
"loss": 0.1622,
"step": 12520
},
{
"epoch": 1.816498133449313,
"grad_norm": 0.43359375,
"learning_rate": 8.527536231884058e-06,
"loss": 0.1619,
"step": 12530
},
{
"epoch": 1.8179478815555798,
"grad_norm": 0.466796875,
"learning_rate": 8.469565217391304e-06,
"loss": 0.1574,
"step": 12540
},
{
"epoch": 1.8193976296618461,
"grad_norm": 0.423828125,
"learning_rate": 8.411594202898551e-06,
"loss": 0.1625,
"step": 12550
},
{
"epoch": 1.820847377768113,
"grad_norm": 0.39453125,
"learning_rate": 8.353623188405798e-06,
"loss": 0.1703,
"step": 12560
},
{
"epoch": 1.8222971258743792,
"grad_norm": 0.4453125,
"learning_rate": 8.295652173913045e-06,
"loss": 0.1636,
"step": 12570
},
{
"epoch": 1.823746873980646,
"grad_norm": 0.40234375,
"learning_rate": 8.237681159420292e-06,
"loss": 0.1744,
"step": 12580
},
{
"epoch": 1.8251966220869122,
"grad_norm": 0.361328125,
"learning_rate": 8.179710144927537e-06,
"loss": 0.1526,
"step": 12590
},
{
"epoch": 1.826646370193179,
"grad_norm": 0.482421875,
"learning_rate": 8.121739130434784e-06,
"loss": 0.1622,
"step": 12600
},
{
"epoch": 1.8280961182994455,
"grad_norm": 0.40625,
"learning_rate": 8.06376811594203e-06,
"loss": 0.1735,
"step": 12610
},
{
"epoch": 1.829545866405712,
"grad_norm": 0.416015625,
"learning_rate": 8.005797101449276e-06,
"loss": 0.1638,
"step": 12620
},
{
"epoch": 1.8309956145119786,
"grad_norm": 0.3671875,
"learning_rate": 7.947826086956522e-06,
"loss": 0.1491,
"step": 12630
},
{
"epoch": 1.832445362618245,
"grad_norm": 0.376953125,
"learning_rate": 7.88985507246377e-06,
"loss": 0.1594,
"step": 12640
},
{
"epoch": 1.8338951107245116,
"grad_norm": 0.439453125,
"learning_rate": 7.831884057971016e-06,
"loss": 0.1722,
"step": 12650
},
{
"epoch": 1.8353448588307781,
"grad_norm": 0.466796875,
"learning_rate": 7.773913043478261e-06,
"loss": 0.1673,
"step": 12660
},
{
"epoch": 1.8367946069370447,
"grad_norm": 0.390625,
"learning_rate": 7.715942028985508e-06,
"loss": 0.1715,
"step": 12670
},
{
"epoch": 1.8382443550433112,
"grad_norm": 0.421875,
"learning_rate": 7.657971014492755e-06,
"loss": 0.1547,
"step": 12680
},
{
"epoch": 1.8396941031495777,
"grad_norm": 0.431640625,
"learning_rate": 7.600000000000001e-06,
"loss": 0.1657,
"step": 12690
},
{
"epoch": 1.8411438512558442,
"grad_norm": 0.37109375,
"learning_rate": 7.542028985507247e-06,
"loss": 0.1711,
"step": 12700
},
{
"epoch": 1.842593599362111,
"grad_norm": 0.423828125,
"learning_rate": 7.484057971014494e-06,
"loss": 0.1641,
"step": 12710
},
{
"epoch": 1.8440433474683773,
"grad_norm": 0.384765625,
"learning_rate": 7.42608695652174e-06,
"loss": 0.1699,
"step": 12720
},
{
"epoch": 1.845493095574644,
"grad_norm": 0.431640625,
"learning_rate": 7.368115942028986e-06,
"loss": 0.1648,
"step": 12730
},
{
"epoch": 1.8469428436809103,
"grad_norm": 0.3984375,
"learning_rate": 7.3101449275362326e-06,
"loss": 0.1614,
"step": 12740
},
{
"epoch": 1.848392591787177,
"grad_norm": 0.44921875,
"learning_rate": 7.2521739130434785e-06,
"loss": 0.1845,
"step": 12750
},
{
"epoch": 1.8498423398934434,
"grad_norm": 0.431640625,
"learning_rate": 7.194202898550725e-06,
"loss": 0.1782,
"step": 12760
},
{
"epoch": 1.8512920879997101,
"grad_norm": 0.4765625,
"learning_rate": 7.136231884057971e-06,
"loss": 0.1585,
"step": 12770
},
{
"epoch": 1.8527418361059764,
"grad_norm": 0.39453125,
"learning_rate": 7.078260869565217e-06,
"loss": 0.1528,
"step": 12780
},
{
"epoch": 1.8541915842122432,
"grad_norm": 0.3125,
"learning_rate": 7.020289855072464e-06,
"loss": 0.1595,
"step": 12790
},
{
"epoch": 1.8556413323185097,
"grad_norm": 0.451171875,
"learning_rate": 6.96231884057971e-06,
"loss": 0.1588,
"step": 12800
},
{
"epoch": 1.8570910804247762,
"grad_norm": 0.48828125,
"learning_rate": 6.904347826086957e-06,
"loss": 0.1579,
"step": 12810
},
{
"epoch": 1.8585408285310427,
"grad_norm": 0.462890625,
"learning_rate": 6.846376811594203e-06,
"loss": 0.1577,
"step": 12820
},
{
"epoch": 1.8599905766373093,
"grad_norm": 0.37890625,
"learning_rate": 6.78840579710145e-06,
"loss": 0.1769,
"step": 12830
},
{
"epoch": 1.8614403247435758,
"grad_norm": 0.337890625,
"learning_rate": 6.730434782608696e-06,
"loss": 0.1668,
"step": 12840
},
{
"epoch": 1.8628900728498423,
"grad_norm": 0.4140625,
"learning_rate": 6.672463768115942e-06,
"loss": 0.1521,
"step": 12850
},
{
"epoch": 1.8643398209561088,
"grad_norm": 0.392578125,
"learning_rate": 6.614492753623189e-06,
"loss": 0.167,
"step": 12860
},
{
"epoch": 1.8657895690623754,
"grad_norm": 0.431640625,
"learning_rate": 6.556521739130435e-06,
"loss": 0.1663,
"step": 12870
},
{
"epoch": 1.867239317168642,
"grad_norm": 0.40625,
"learning_rate": 6.498550724637682e-06,
"loss": 0.1714,
"step": 12880
},
{
"epoch": 1.8686890652749084,
"grad_norm": 0.431640625,
"learning_rate": 6.440579710144928e-06,
"loss": 0.1653,
"step": 12890
},
{
"epoch": 1.8701388133811752,
"grad_norm": 0.3984375,
"learning_rate": 6.382608695652175e-06,
"loss": 0.1588,
"step": 12900
},
{
"epoch": 1.8715885614874415,
"grad_norm": 0.365234375,
"learning_rate": 6.324637681159421e-06,
"loss": 0.153,
"step": 12910
},
{
"epoch": 1.8730383095937082,
"grad_norm": 0.365234375,
"learning_rate": 6.266666666666668e-06,
"loss": 0.1585,
"step": 12920
},
{
"epoch": 1.8744880576999745,
"grad_norm": 0.45703125,
"learning_rate": 6.208695652173914e-06,
"loss": 0.1595,
"step": 12930
},
{
"epoch": 1.8759378058062413,
"grad_norm": 0.421875,
"learning_rate": 6.15072463768116e-06,
"loss": 0.1784,
"step": 12940
},
{
"epoch": 1.8773875539125076,
"grad_norm": 0.388671875,
"learning_rate": 6.092753623188407e-06,
"loss": 0.167,
"step": 12950
},
{
"epoch": 1.8788373020187743,
"grad_norm": 0.380859375,
"learning_rate": 6.034782608695653e-06,
"loss": 0.1606,
"step": 12960
},
{
"epoch": 1.8802870501250408,
"grad_norm": 0.447265625,
"learning_rate": 5.9768115942028996e-06,
"loss": 0.1576,
"step": 12970
},
{
"epoch": 1.8817367982313074,
"grad_norm": 0.404296875,
"learning_rate": 5.9188405797101455e-06,
"loss": 0.1736,
"step": 12980
},
{
"epoch": 1.8831865463375739,
"grad_norm": 0.46484375,
"learning_rate": 5.8608695652173915e-06,
"loss": 0.1703,
"step": 12990
},
{
"epoch": 1.8846362944438404,
"grad_norm": 0.365234375,
"learning_rate": 5.802898550724638e-06,
"loss": 0.1715,
"step": 13000
},
{
"epoch": 1.886086042550107,
"grad_norm": 0.40234375,
"learning_rate": 5.744927536231884e-06,
"loss": 0.1536,
"step": 13010
},
{
"epoch": 1.8875357906563734,
"grad_norm": 0.3828125,
"learning_rate": 5.686956521739131e-06,
"loss": 0.1657,
"step": 13020
},
{
"epoch": 1.88898553876264,
"grad_norm": 0.419921875,
"learning_rate": 5.628985507246377e-06,
"loss": 0.1712,
"step": 13030
},
{
"epoch": 1.8904352868689065,
"grad_norm": 0.4375,
"learning_rate": 5.571014492753624e-06,
"loss": 0.1628,
"step": 13040
},
{
"epoch": 1.891885034975173,
"grad_norm": 0.4921875,
"learning_rate": 5.51304347826087e-06,
"loss": 0.1718,
"step": 13050
},
{
"epoch": 1.8933347830814395,
"grad_norm": 0.408203125,
"learning_rate": 5.455072463768116e-06,
"loss": 0.1642,
"step": 13060
},
{
"epoch": 1.8947845311877063,
"grad_norm": 0.4765625,
"learning_rate": 5.397101449275363e-06,
"loss": 0.1583,
"step": 13070
},
{
"epoch": 1.8962342792939726,
"grad_norm": 0.41796875,
"learning_rate": 5.339130434782609e-06,
"loss": 0.1727,
"step": 13080
},
{
"epoch": 1.8976840274002393,
"grad_norm": 0.359375,
"learning_rate": 5.281159420289856e-06,
"loss": 0.1627,
"step": 13090
},
{
"epoch": 1.8991337755065056,
"grad_norm": 0.384765625,
"learning_rate": 5.223188405797102e-06,
"loss": 0.1543,
"step": 13100
},
{
"epoch": 1.9005835236127724,
"grad_norm": 0.40234375,
"learning_rate": 5.165217391304348e-06,
"loss": 0.163,
"step": 13110
},
{
"epoch": 1.9020332717190387,
"grad_norm": 0.423828125,
"learning_rate": 5.1072463768115944e-06,
"loss": 0.1671,
"step": 13120
},
{
"epoch": 1.9034830198253054,
"grad_norm": 0.37109375,
"learning_rate": 5.0492753623188404e-06,
"loss": 0.1604,
"step": 13130
},
{
"epoch": 1.9049327679315717,
"grad_norm": 0.427734375,
"learning_rate": 4.991304347826087e-06,
"loss": 0.163,
"step": 13140
},
{
"epoch": 1.9063825160378385,
"grad_norm": 0.4296875,
"learning_rate": 4.933333333333334e-06,
"loss": 0.1603,
"step": 13150
},
{
"epoch": 1.907832264144105,
"grad_norm": 0.388671875,
"learning_rate": 4.87536231884058e-06,
"loss": 0.1695,
"step": 13160
},
{
"epoch": 1.9092820122503715,
"grad_norm": 0.40625,
"learning_rate": 4.817391304347827e-06,
"loss": 0.1585,
"step": 13170
},
{
"epoch": 1.910731760356638,
"grad_norm": 0.421875,
"learning_rate": 4.759420289855073e-06,
"loss": 0.1646,
"step": 13180
},
{
"epoch": 1.9121815084629046,
"grad_norm": 0.447265625,
"learning_rate": 4.701449275362319e-06,
"loss": 0.1657,
"step": 13190
},
{
"epoch": 1.913631256569171,
"grad_norm": 0.380859375,
"learning_rate": 4.643478260869566e-06,
"loss": 0.1645,
"step": 13200
},
{
"epoch": 1.9150810046754376,
"grad_norm": 0.41796875,
"learning_rate": 4.585507246376812e-06,
"loss": 0.1639,
"step": 13210
},
{
"epoch": 1.9165307527817041,
"grad_norm": 0.45703125,
"learning_rate": 4.5275362318840585e-06,
"loss": 0.1629,
"step": 13220
},
{
"epoch": 1.9179805008879707,
"grad_norm": 0.388671875,
"learning_rate": 4.4695652173913045e-06,
"loss": 0.1681,
"step": 13230
},
{
"epoch": 1.9194302489942372,
"grad_norm": 0.380859375,
"learning_rate": 4.4115942028985505e-06,
"loss": 0.1516,
"step": 13240
},
{
"epoch": 1.9208799971005037,
"grad_norm": 0.466796875,
"learning_rate": 4.353623188405797e-06,
"loss": 0.1613,
"step": 13250
},
{
"epoch": 1.9223297452067705,
"grad_norm": 0.375,
"learning_rate": 4.295652173913043e-06,
"loss": 0.1511,
"step": 13260
},
{
"epoch": 1.9237794933130368,
"grad_norm": 0.37109375,
"learning_rate": 4.23768115942029e-06,
"loss": 0.1612,
"step": 13270
},
{
"epoch": 1.9252292414193035,
"grad_norm": 0.458984375,
"learning_rate": 4.179710144927537e-06,
"loss": 0.1603,
"step": 13280
},
{
"epoch": 1.9266789895255698,
"grad_norm": 0.41015625,
"learning_rate": 4.121739130434783e-06,
"loss": 0.1641,
"step": 13290
},
{
"epoch": 1.9281287376318366,
"grad_norm": 0.4375,
"learning_rate": 4.06376811594203e-06,
"loss": 0.1736,
"step": 13300
},
{
"epoch": 1.9295784857381029,
"grad_norm": 0.455078125,
"learning_rate": 4.005797101449276e-06,
"loss": 0.1646,
"step": 13310
},
{
"epoch": 1.9310282338443696,
"grad_norm": 0.4375,
"learning_rate": 3.947826086956522e-06,
"loss": 0.1586,
"step": 13320
},
{
"epoch": 1.932477981950636,
"grad_norm": 0.37109375,
"learning_rate": 3.889855072463769e-06,
"loss": 0.1689,
"step": 13330
},
{
"epoch": 1.9339277300569027,
"grad_norm": 0.431640625,
"learning_rate": 3.831884057971015e-06,
"loss": 0.1634,
"step": 13340
},
{
"epoch": 1.9353774781631692,
"grad_norm": 0.515625,
"learning_rate": 3.773913043478261e-06,
"loss": 0.1897,
"step": 13350
},
{
"epoch": 1.9368272262694357,
"grad_norm": 0.396484375,
"learning_rate": 3.7159420289855074e-06,
"loss": 0.1631,
"step": 13360
},
{
"epoch": 1.9382769743757022,
"grad_norm": 0.4140625,
"learning_rate": 3.657971014492754e-06,
"loss": 0.1614,
"step": 13370
},
{
"epoch": 1.9397267224819688,
"grad_norm": 0.47265625,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.1672,
"step": 13380
},
{
"epoch": 1.9411764705882353,
"grad_norm": 0.4609375,
"learning_rate": 3.5420289855072467e-06,
"loss": 0.1661,
"step": 13390
},
{
"epoch": 1.9426262186945018,
"grad_norm": 0.380859375,
"learning_rate": 3.4840579710144927e-06,
"loss": 0.1473,
"step": 13400
},
{
"epoch": 1.9440759668007683,
"grad_norm": 0.384765625,
"learning_rate": 3.426086956521739e-06,
"loss": 0.1603,
"step": 13410
},
{
"epoch": 1.9455257149070349,
"grad_norm": 0.443359375,
"learning_rate": 3.3681159420289855e-06,
"loss": 0.1655,
"step": 13420
},
{
"epoch": 1.9469754630133016,
"grad_norm": 0.453125,
"learning_rate": 3.3101449275362323e-06,
"loss": 0.1634,
"step": 13430
},
{
"epoch": 1.948425211119568,
"grad_norm": 0.453125,
"learning_rate": 3.2521739130434787e-06,
"loss": 0.1719,
"step": 13440
},
{
"epoch": 1.9498749592258346,
"grad_norm": 0.36328125,
"learning_rate": 3.194202898550725e-06,
"loss": 0.1579,
"step": 13450
},
{
"epoch": 1.951324707332101,
"grad_norm": 0.376953125,
"learning_rate": 3.1362318840579715e-06,
"loss": 0.1578,
"step": 13460
},
{
"epoch": 1.9527744554383677,
"grad_norm": 0.392578125,
"learning_rate": 3.078260869565218e-06,
"loss": 0.1557,
"step": 13470
},
{
"epoch": 1.954224203544634,
"grad_norm": 0.4140625,
"learning_rate": 3.020289855072464e-06,
"loss": 0.1553,
"step": 13480
},
{
"epoch": 1.9556739516509007,
"grad_norm": 0.3984375,
"learning_rate": 2.9623188405797103e-06,
"loss": 0.1607,
"step": 13490
},
{
"epoch": 1.957123699757167,
"grad_norm": 0.3984375,
"learning_rate": 2.9043478260869568e-06,
"loss": 0.1782,
"step": 13500
}
],
"logging_steps": 10,
"max_steps": 14000,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 9.711237437485326e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}