{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.04079993472010445,
  "eval_steps": 200000,
  "global_step": 25500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0001599997440004096,
      "grad_norm": 84.32501983642578,
      "learning_rate": 3.103950336794611e-08,
      "loss": 10.8792,
      "step": 100
    },
    {
      "epoch": 0.0003199994880008192,
      "grad_norm": 60.63747024536133,
      "learning_rate": 6.303899137613798e-08,
      "loss": 10.9284,
      "step": 200
    },
    {
      "epoch": 0.00047999923200122877,
      "grad_norm": 55.71075439453125,
      "learning_rate": 9.503847938432986e-08,
      "loss": 10.6466,
      "step": 300
    },
    {
      "epoch": 0.0006399989760016384,
      "grad_norm": 57.63307189941406,
      "learning_rate": 1.2703796739252173e-07,
      "loss": 10.841,
      "step": 400
    },
    {
      "epoch": 0.000799998720002048,
      "grad_norm": 89.1032485961914,
      "learning_rate": 1.590374554007136e-07,
      "loss": 10.8094,
      "step": 500
    },
    {
      "epoch": 0.0009599984640024575,
      "grad_norm": 57.2479362487793,
      "learning_rate": 1.9103694340890547e-07,
      "loss": 10.4323,
      "step": 600
    },
    {
      "epoch": 0.0011199982080028672,
      "grad_norm": 51.17530059814453,
      "learning_rate": 2.2303643141709733e-07,
      "loss": 10.3032,
      "step": 700
    },
    {
      "epoch": 0.0012799979520032767,
      "grad_norm": 60.76409912109375,
      "learning_rate": 2.550359194252892e-07,
      "loss": 10.4006,
      "step": 800
    },
    {
      "epoch": 0.0014399976960036865,
      "grad_norm": 67.00859069824219,
      "learning_rate": 2.870354074334811e-07,
      "loss": 10.4743,
      "step": 900
    },
    {
      "epoch": 0.001599997440004096,
      "grad_norm": 68.4343032836914,
      "learning_rate": 3.19034895441673e-07,
      "loss": 10.2334,
      "step": 1000
    },
    {
      "epoch": 0.0017599971840045055,
      "grad_norm": 48.704105377197266,
      "learning_rate": 3.510343834498648e-07,
      "loss": 10.0135,
      "step": 1100
    },
    {
      "epoch": 0.001919996928004915,
      "grad_norm": 45.30134963989258,
      "learning_rate": 3.830338714580567e-07,
      "loss": 9.7874,
      "step": 1200
    },
    {
      "epoch": 0.002079996672005325,
      "grad_norm": 84.56024169921875,
      "learning_rate": 4.150333594662486e-07,
      "loss": 9.7419,
      "step": 1300
    },
    {
      "epoch": 0.0022399964160057344,
      "grad_norm": 45.73213195800781,
      "learning_rate": 4.470328474744404e-07,
      "loss": 9.7412,
      "step": 1400
    },
    {
      "epoch": 0.002399996160006144,
      "grad_norm": 50.21996307373047,
      "learning_rate": 4.790323354826324e-07,
      "loss": 9.4585,
      "step": 1500
    },
    {
      "epoch": 0.0025599959040065534,
      "grad_norm": 59.475799560546875,
      "learning_rate": 5.110318234908241e-07,
      "loss": 9.5339,
      "step": 1600
    },
    {
      "epoch": 0.002719995648006963,
      "grad_norm": 82.53620910644531,
      "learning_rate": 5.43031311499016e-07,
      "loss": 9.4345,
      "step": 1700
    },
    {
      "epoch": 0.002879995392007373,
      "grad_norm": 39.44235610961914,
      "learning_rate": 5.750307995072079e-07,
      "loss": 9.1733,
      "step": 1800
    },
    {
      "epoch": 0.0030399951360077825,
      "grad_norm": 37.58698654174805,
      "learning_rate": 6.070302875153998e-07,
      "loss": 8.9952,
      "step": 1900
    },
    {
      "epoch": 0.003199994880008192,
      "grad_norm": 40.35204315185547,
      "learning_rate": 6.390297755235917e-07,
      "loss": 8.9669,
      "step": 2000
    },
    {
      "epoch": 0.0033599946240086016,
      "grad_norm": 57.84451675415039,
      "learning_rate": 6.707092686517017e-07,
      "loss": 8.8152,
      "step": 2100
    },
    {
      "epoch": 0.003519994368009011,
      "grad_norm": 40.126953125,
      "learning_rate": 7.027087566598935e-07,
      "loss": 8.7936,
      "step": 2200
    },
    {
      "epoch": 0.0036799941120094206,
      "grad_norm": 35.435707092285156,
      "learning_rate": 7.347082446680854e-07,
      "loss": 8.6771,
      "step": 2300
    },
    {
      "epoch": 0.00383999385600983,
      "grad_norm": 42.3509635925293,
      "learning_rate": 7.667077326762773e-07,
      "loss": 8.4648,
      "step": 2400
    },
    {
      "epoch": 0.00399999360001024,
      "grad_norm": 33.58556365966797,
      "learning_rate": 7.987072206844691e-07,
      "loss": 8.5764,
      "step": 2500
    },
    {
      "epoch": 0.00415999334401065,
      "grad_norm": 34.014678955078125,
      "learning_rate": 8.30706708692661e-07,
      "loss": 8.4587,
      "step": 2600
    },
    {
      "epoch": 0.004319993088011059,
      "grad_norm": 36.43831253051758,
      "learning_rate": 8.627061967008528e-07,
      "loss": 8.2966,
      "step": 2700
    },
    {
      "epoch": 0.004479992832011469,
      "grad_norm": 31.411684036254883,
      "learning_rate": 8.947056847090448e-07,
      "loss": 8.2329,
      "step": 2800
    },
    {
      "epoch": 0.004639992576011879,
      "grad_norm": 47.570125579833984,
      "learning_rate": 9.267051727172366e-07,
      "loss": 8.1415,
      "step": 2900
    },
    {
      "epoch": 0.004799992320012288,
      "grad_norm": 30.771928787231445,
      "learning_rate": 9.587046607254284e-07,
      "loss": 8.0404,
      "step": 3000
    },
    {
      "epoch": 0.004959992064012698,
      "grad_norm": 26.92803955078125,
      "learning_rate": 9.907041487336204e-07,
      "loss": 7.9698,
      "step": 3100
    },
    {
      "epoch": 0.005119991808013107,
      "grad_norm": 31.121917724609375,
      "learning_rate": 1.0227036367418122e-06,
      "loss": 7.9205,
      "step": 3200
    },
    {
      "epoch": 0.005279991552013517,
      "grad_norm": 33.991416931152344,
      "learning_rate": 1.054703124750004e-06,
      "loss": 7.8314,
      "step": 3300
    },
    {
      "epoch": 0.005439991296013926,
      "grad_norm": 31.278030395507812,
      "learning_rate": 1.086702612758196e-06,
      "loss": 7.8369,
      "step": 3400
    },
    {
      "epoch": 0.005599991040014336,
      "grad_norm": 28.116140365600586,
      "learning_rate": 1.1187021007663878e-06,
      "loss": 7.6403,
      "step": 3500
    },
    {
      "epoch": 0.005759990784014746,
      "grad_norm": 30.954113006591797,
      "learning_rate": 1.1507015887745798e-06,
      "loss": 7.5842,
      "step": 3600
    },
    {
      "epoch": 0.005919990528015155,
      "grad_norm": 36.53567886352539,
      "learning_rate": 1.1827010767827715e-06,
      "loss": 7.5812,
      "step": 3700
    },
    {
      "epoch": 0.006079990272015565,
      "grad_norm": 36.81153106689453,
      "learning_rate": 1.2147005647909635e-06,
      "loss": 7.4335,
      "step": 3800
    },
    {
      "epoch": 0.006239990016015974,
      "grad_norm": 22.556833267211914,
      "learning_rate": 1.2467000527991553e-06,
      "loss": 7.4917,
      "step": 3900
    },
    {
      "epoch": 0.006399989760016384,
      "grad_norm": 40.195579528808594,
      "learning_rate": 1.278699540807347e-06,
      "loss": 7.3204,
      "step": 4000
    },
    {
      "epoch": 0.006559989504016793,
      "grad_norm": 21.862642288208008,
      "learning_rate": 1.310699028815539e-06,
      "loss": 7.2971,
      "step": 4100
    },
    {
      "epoch": 0.006719989248017203,
      "grad_norm": 29.61161231994629,
      "learning_rate": 1.3426985168237308e-06,
      "loss": 7.2233,
      "step": 4200
    },
    {
      "epoch": 0.006879988992017613,
      "grad_norm": 22.342451095581055,
      "learning_rate": 1.3746980048319228e-06,
      "loss": 7.2081,
      "step": 4300
    },
    {
      "epoch": 0.007039988736018022,
      "grad_norm": 36.36684799194336,
      "learning_rate": 1.4066974928401148e-06,
      "loss": 7.1364,
      "step": 4400
    },
    {
      "epoch": 0.007199988480018432,
      "grad_norm": 25.563953399658203,
      "learning_rate": 1.4386969808483064e-06,
      "loss": 7.0663,
      "step": 4500
    },
    {
      "epoch": 0.007359988224018841,
      "grad_norm": 22.50385856628418,
      "learning_rate": 1.4706964688564984e-06,
      "loss": 6.9601,
      "step": 4600
    },
    {
      "epoch": 0.007519987968019251,
      "grad_norm": 31.61231231689453,
      "learning_rate": 1.5026959568646904e-06,
      "loss": 6.9546,
      "step": 4700
    },
    {
      "epoch": 0.00767998771201966,
      "grad_norm": 18.862520217895508,
      "learning_rate": 1.5346954448728822e-06,
      "loss": 6.9019,
      "step": 4800
    },
    {
      "epoch": 0.00783998745602007,
      "grad_norm": 32.594539642333984,
      "learning_rate": 1.5666949328810741e-06,
      "loss": 6.8801,
      "step": 4900
    },
    {
      "epoch": 0.00799998720002048,
      "grad_norm": 21.06804084777832,
      "learning_rate": 1.598694420889266e-06,
      "loss": 6.7734,
      "step": 5000
    },
    {
      "epoch": 0.00815998694402089,
      "grad_norm": 31.783803939819336,
      "learning_rate": 1.6303739140173757e-06,
      "loss": 6.7648,
      "step": 5100
    },
    {
      "epoch": 0.0083199866880213,
      "grad_norm": 49.79084777832031,
      "learning_rate": 1.6623734020255677e-06,
      "loss": 6.7498,
      "step": 5200
    },
    {
      "epoch": 0.008479986432021708,
      "grad_norm": 26.1977481842041,
      "learning_rate": 1.6943728900337597e-06,
      "loss": 6.6872,
      "step": 5300
    },
    {
      "epoch": 0.008639986176022118,
      "grad_norm": 21.942001342773438,
      "learning_rate": 1.7263723780419515e-06,
      "loss": 6.6264,
      "step": 5400
    },
    {
      "epoch": 0.008799985920022528,
      "grad_norm": 32.572959899902344,
      "learning_rate": 1.7583718660501433e-06,
      "loss": 6.579,
      "step": 5500
    },
    {
      "epoch": 0.008959985664022938,
      "grad_norm": 20.728240966796875,
      "learning_rate": 1.7903713540583353e-06,
      "loss": 6.6001,
      "step": 5600
    },
    {
      "epoch": 0.009119985408023347,
      "grad_norm": 24.334205627441406,
      "learning_rate": 1.822370842066527e-06,
      "loss": 6.5971,
      "step": 5700
    },
    {
      "epoch": 0.009279985152023757,
      "grad_norm": 27.025753021240234,
      "learning_rate": 1.854370330074719e-06,
      "loss": 6.4694,
      "step": 5800
    },
    {
      "epoch": 0.009439984896024167,
      "grad_norm": 23.506013870239258,
      "learning_rate": 1.8863698180829106e-06,
      "loss": 6.3983,
      "step": 5900
    },
    {
      "epoch": 0.009599984640024576,
      "grad_norm": 35.65713882446289,
      "learning_rate": 1.9183693060911026e-06,
      "loss": 6.4477,
      "step": 6000
    },
    {
      "epoch": 0.009759984384024985,
      "grad_norm": 22.977373123168945,
      "learning_rate": 1.950368794099295e-06,
      "loss": 6.4308,
      "step": 6100
    },
    {
      "epoch": 0.009919984128025396,
      "grad_norm": 22.127635955810547,
      "learning_rate": 1.982368282107486e-06,
      "loss": 6.4248,
      "step": 6200
    },
    {
      "epoch": 0.010079983872025805,
      "grad_norm": 33.53960418701172,
      "learning_rate": 2.0143677701156784e-06,
      "loss": 6.2642,
      "step": 6300
    },
    {
      "epoch": 0.010239983616026214,
      "grad_norm": 24.39597511291504,
      "learning_rate": 2.04636725812387e-06,
      "loss": 6.2763,
      "step": 6400
    },
    {
      "epoch": 0.010399983360026625,
      "grad_norm": 24.471288681030273,
      "learning_rate": 2.078366746132062e-06,
      "loss": 6.3878,
      "step": 6500
    },
    {
      "epoch": 0.010559983104027034,
      "grad_norm": 34.05498123168945,
      "learning_rate": 2.110366234140254e-06,
      "loss": 6.2601,
      "step": 6600
    },
    {
      "epoch": 0.010719982848027443,
      "grad_norm": 30.60455322265625,
      "learning_rate": 2.142365722148446e-06,
      "loss": 6.1789,
      "step": 6700
    },
    {
      "epoch": 0.010879982592027852,
      "grad_norm": 27.737686157226562,
      "learning_rate": 2.1743652101566377e-06,
      "loss": 6.1773,
      "step": 6800
    },
    {
      "epoch": 0.011039982336028263,
      "grad_norm": 24.246810913085938,
      "learning_rate": 2.2063646981648294e-06,
      "loss": 6.1439,
      "step": 6900
    },
    {
      "epoch": 0.011199982080028672,
      "grad_norm": 27.53533363342285,
      "learning_rate": 2.2383641861730217e-06,
      "loss": 6.1863,
      "step": 7000
    },
    {
      "epoch": 0.011359981824029081,
      "grad_norm": 27.81687355041504,
      "learning_rate": 2.2703636741812134e-06,
      "loss": 6.0513,
      "step": 7100
    },
    {
      "epoch": 0.011519981568029492,
      "grad_norm": 28.00519371032715,
      "learning_rate": 2.3020431673093234e-06,
      "loss": 6.0671,
      "step": 7200
    },
    {
      "epoch": 0.011679981312029901,
      "grad_norm": 29.347061157226562,
      "learning_rate": 2.3340426553175152e-06,
      "loss": 6.0212,
      "step": 7300
    },
    {
      "epoch": 0.01183998105603031,
      "grad_norm": 29.621200561523438,
      "learning_rate": 2.365722148445625e-06,
      "loss": 6.0043,
      "step": 7400
    },
    {
      "epoch": 0.011999980800030719,
      "grad_norm": 31.689117431640625,
      "learning_rate": 2.397721636453817e-06,
      "loss": 6.0166,
      "step": 7500
    },
    {
      "epoch": 0.01215998054403113,
      "grad_norm": 46.79508972167969,
      "learning_rate": 2.429721124462009e-06,
      "loss": 5.9754,
      "step": 7600
    },
    {
      "epoch": 0.012319980288031539,
      "grad_norm": 28.857833862304688,
      "learning_rate": 2.4617206124702006e-06,
      "loss": 5.9211,
      "step": 7700
    },
    {
      "epoch": 0.012479980032031948,
      "grad_norm": 58.34132766723633,
      "learning_rate": 2.4937201004783928e-06,
      "loss": 5.7867,
      "step": 7800
    },
    {
      "epoch": 0.012639979776032359,
      "grad_norm": 49.33425521850586,
      "learning_rate": 2.525719588486584e-06,
      "loss": 5.8534,
      "step": 7900
    },
    {
      "epoch": 0.012799979520032768,
      "grad_norm": 39.17392349243164,
      "learning_rate": 2.5577190764947763e-06,
      "loss": 5.7708,
      "step": 8000
    },
    {
      "epoch": 0.012959979264033177,
      "grad_norm": 45.94136428833008,
      "learning_rate": 2.589718564502968e-06,
      "loss": 5.8328,
      "step": 8100
    },
    {
      "epoch": 0.013119979008033586,
      "grad_norm": 36.19196319580078,
      "learning_rate": 2.6217180525111603e-06,
      "loss": 5.7417,
      "step": 8200
    },
    {
      "epoch": 0.013279978752033997,
      "grad_norm": 37.051658630371094,
      "learning_rate": 2.653717540519352e-06,
      "loss": 5.8097,
      "step": 8300
    },
    {
      "epoch": 0.013439978496034406,
      "grad_norm": 90.0757064819336,
      "learning_rate": 2.6857170285275435e-06,
      "loss": 5.7578,
      "step": 8400
    },
    {
      "epoch": 0.013599978240034815,
      "grad_norm": 92.7857894897461,
      "learning_rate": 2.7177165165357357e-06,
      "loss": 5.643,
      "step": 8500
    },
    {
      "epoch": 0.013759977984035226,
      "grad_norm": 26.648149490356445,
      "learning_rate": 2.7497160045439274e-06,
      "loss": 5.6401,
      "step": 8600
    },
    {
      "epoch": 0.013919977728035635,
      "grad_norm": 45.42919158935547,
      "learning_rate": 2.7817154925521196e-06,
      "loss": 5.6627,
      "step": 8700
    },
    {
      "epoch": 0.014079977472036044,
      "grad_norm": 48.3182487487793,
      "learning_rate": 2.8137149805603114e-06,
      "loss": 5.6167,
      "step": 8800
    },
    {
      "epoch": 0.014239977216036454,
      "grad_norm": 51.463653564453125,
      "learning_rate": 2.8457144685685028e-06,
      "loss": 5.6539,
      "step": 8900
    },
    {
      "epoch": 0.014399976960036864,
      "grad_norm": 47.81680679321289,
      "learning_rate": 2.877713956576695e-06,
      "loss": 5.4513,
      "step": 9000
    },
    {
      "epoch": 0.014559976704037273,
      "grad_norm": 42.410667419433594,
      "learning_rate": 2.9097134445848868e-06,
      "loss": 5.4132,
      "step": 9100
    },
    {
      "epoch": 0.014719976448037683,
      "grad_norm": 55.33562088012695,
      "learning_rate": 2.941712932593079e-06,
      "loss": 5.4714,
      "step": 9200
    },
    {
      "epoch": 0.014879976192038093,
      "grad_norm": 38.538246154785156,
      "learning_rate": 2.9737124206012707e-06,
      "loss": 5.4786,
      "step": 9300
    },
    {
      "epoch": 0.015039975936038502,
      "grad_norm": 43.42023468017578,
      "learning_rate": 3.0057119086094625e-06,
      "loss": 5.3928,
      "step": 9400
    },
    {
      "epoch": 0.015199975680038912,
      "grad_norm": 24.861467361450195,
      "learning_rate": 3.037391401737572e-06,
      "loss": 5.4774,
      "step": 9500
    },
    {
      "epoch": 0.01535997542403932,
      "grad_norm": 98.92141723632812,
      "learning_rate": 3.0693908897457643e-06,
      "loss": 5.2881,
      "step": 9600
    },
    {
      "epoch": 0.015519975168039732,
      "grad_norm": 62.839866638183594,
      "learning_rate": 3.101390377753956e-06,
      "loss": 5.3699,
      "step": 9700
    },
    {
      "epoch": 0.01567997491204014,
      "grad_norm": 46.006065368652344,
      "learning_rate": 3.133069870882066e-06,
      "loss": 5.1483,
      "step": 9800
    },
    {
      "epoch": 0.01583997465604055,
      "grad_norm": 89.62445068359375,
      "learning_rate": 3.1650693588902583e-06,
      "loss": 5.3051,
      "step": 9900
    },
    {
      "epoch": 0.01599997440004096,
      "grad_norm": 41.113609313964844,
      "learning_rate": 3.19706884689845e-06,
      "loss": 5.2546,
      "step": 10000
    },
    {
      "epoch": 0.01615997414404137,
      "grad_norm": 46.37376403808594,
      "learning_rate": 3.2290683349066414e-06,
      "loss": 5.2314,
      "step": 10100
    },
    {
      "epoch": 0.01631997388804178,
      "grad_norm": 60.3846321105957,
      "learning_rate": 3.2610678229148337e-06,
      "loss": 5.1783,
      "step": 10200
    },
    {
      "epoch": 0.016479973632042188,
      "grad_norm": 145.4359130859375,
      "learning_rate": 3.2930673109230254e-06,
      "loss": 5.2074,
      "step": 10300
    },
    {
      "epoch": 0.0166399733760426,
      "grad_norm": 69.00183868408203,
      "learning_rate": 3.325066798931217e-06,
      "loss": 5.2825,
      "step": 10400
    },
    {
      "epoch": 0.01679997312004301,
      "grad_norm": 48.03580093383789,
      "learning_rate": 3.3570662869394094e-06,
      "loss": 5.1715,
      "step": 10500
    },
    {
      "epoch": 0.016959972864043417,
      "grad_norm": 58.56736755371094,
      "learning_rate": 3.389065774947601e-06,
      "loss": 5.087,
      "step": 10600
    },
    {
      "epoch": 0.017119972608043828,
      "grad_norm": 54.484527587890625,
      "learning_rate": 3.421065262955793e-06,
      "loss": 5.082,
      "step": 10700
    },
    {
      "epoch": 0.017279972352044235,
      "grad_norm": 74.30866241455078,
      "learning_rate": 3.4530647509639847e-06,
      "loss": 4.9111,
      "step": 10800
    },
    {
      "epoch": 0.017439972096044646,
      "grad_norm": 60.489505767822266,
      "learning_rate": 3.4850642389721765e-06,
      "loss": 5.0213,
      "step": 10900
    },
    {
      "epoch": 0.017599971840045057,
      "grad_norm": 61.25093460083008,
      "learning_rate": 3.5170637269803687e-06,
      "loss": 4.9898,
      "step": 11000
    },
    {
      "epoch": 0.017759971584045464,
      "grad_norm": 51.98568344116211,
      "learning_rate": 3.5490632149885605e-06,
      "loss": 4.7734,
      "step": 11100
    },
    {
      "epoch": 0.017919971328045875,
      "grad_norm": 64.08167266845703,
      "learning_rate": 3.581062702996752e-06,
      "loss": 4.9511,
      "step": 11200
    },
    {
      "epoch": 0.018079971072046286,
      "grad_norm": 61.8354606628418,
      "learning_rate": 3.613062191004944e-06,
      "loss": 5.0481,
      "step": 11300
    },
    {
      "epoch": 0.018239970816046693,
      "grad_norm": 97.53675842285156,
      "learning_rate": 3.645061679013136e-06,
      "loss": 4.8441,
      "step": 11400
    },
    {
      "epoch": 0.018399970560047104,
      "grad_norm": 49.35017013549805,
      "learning_rate": 3.677061167021328e-06,
      "loss": 4.873,
      "step": 11500
    },
    {
      "epoch": 0.018559970304047515,
      "grad_norm": 44.33409118652344,
      "learning_rate": 3.70906065502952e-06,
      "loss": 4.9988,
      "step": 11600
    },
    {
      "epoch": 0.018719970048047922,
      "grad_norm": 140.5505828857422,
      "learning_rate": 3.741060143037712e-06,
      "loss": 4.7653,
      "step": 11700
    },
    {
      "epoch": 0.018879969792048333,
      "grad_norm": 68.21163177490234,
      "learning_rate": 3.7730596310459034e-06,
      "loss": 4.804,
      "step": 11800
    },
    {
      "epoch": 0.019039969536048744,
      "grad_norm": 48.678226470947266,
      "learning_rate": 3.805059119054095e-06,
      "loss": 4.8288,
      "step": 11900
    },
    {
      "epoch": 0.01919996928004915,
      "grad_norm": 76.32611083984375,
      "learning_rate": 3.837058607062287e-06,
      "loss": 4.7053,
      "step": 12000
    },
    {
      "epoch": 0.019359969024049562,
      "grad_norm": 70.85586547851562,
      "learning_rate": 3.869058095070479e-06,
      "loss": 4.6887,
      "step": 12100
    },
    {
      "epoch": 0.01951996876804997,
      "grad_norm": 66.46036529541016,
      "learning_rate": 3.901057583078671e-06,
      "loss": 4.7832,
      "step": 12200
    },
    {
      "epoch": 0.01967996851205038,
      "grad_norm": 165.13221740722656,
      "learning_rate": 3.9330570710868636e-06,
      "loss": 4.6817,
      "step": 12300
    },
    {
      "epoch": 0.01983996825605079,
      "grad_norm": 118.48895263671875,
      "learning_rate": 3.965056559095055e-06,
      "loss": 4.6252,
      "step": 12400
    },
    {
      "epoch": 0.0199999680000512,
      "grad_norm": 64.3436050415039,
      "learning_rate": 3.997056047103246e-06,
      "loss": 4.5936,
      "step": 12500
    },
    {
      "epoch": 0.02015996774405161,
      "grad_norm": 42.27592468261719,
      "learning_rate": 4.0290555351114385e-06,
      "loss": 4.7452,
      "step": 12600
    },
    {
      "epoch": 0.02031996748805202,
      "grad_norm": 60.829036712646484,
      "learning_rate": 4.061055023119631e-06,
      "loss": 4.5321,
      "step": 12700
    },
    {
      "epoch": 0.020479967232052428,
      "grad_norm": 161.975830078125,
      "learning_rate": 4.093054511127823e-06,
      "loss": 4.4964,
      "step": 12800
    },
    {
      "epoch": 0.02063996697605284,
      "grad_norm": 99.2963638305664,
      "learning_rate": 4.125053999136014e-06,
      "loss": 4.4421,
      "step": 12900
    },
    {
      "epoch": 0.02079996672005325,
      "grad_norm": 68.78880310058594,
      "learning_rate": 4.156733492264124e-06,
      "loss": 4.3782,
      "step": 13000
    },
    {
      "epoch": 0.020959966464053657,
      "grad_norm": 80.74951171875,
      "learning_rate": 4.188732980272316e-06,
      "loss": 4.5169,
      "step": 13100
    },
    {
      "epoch": 0.021119966208054067,
      "grad_norm": 157.87254333496094,
      "learning_rate": 4.220412473400426e-06,
      "loss": 4.533,
      "step": 13200
    },
    {
      "epoch": 0.02127996595205448,
      "grad_norm": 148.68331909179688,
      "learning_rate": 4.252411961408618e-06,
      "loss": 4.3725,
      "step": 13300
    },
    {
      "epoch": 0.021439965696054886,
      "grad_norm": 72.9531021118164,
      "learning_rate": 4.28441144941681e-06,
      "loss": 4.2911,
      "step": 13400
    },
    {
      "epoch": 0.021599965440055297,
      "grad_norm": 73.24847412109375,
      "learning_rate": 4.316410937425001e-06,
      "loss": 4.2261,
      "step": 13500
    },
    {
      "epoch": 0.021759965184055704,
      "grad_norm": 94.57313537597656,
      "learning_rate": 4.348410425433194e-06,
      "loss": 4.2467,
      "step": 13600
    },
    {
      "epoch": 0.021919964928056115,
      "grad_norm": 105.674560546875,
      "learning_rate": 4.380409913441385e-06,
      "loss": 4.1558,
      "step": 13700
    },
    {
      "epoch": 0.022079964672056526,
      "grad_norm": 63.658287048339844,
      "learning_rate": 4.412409401449577e-06,
      "loss": 4.2794,
      "step": 13800
    },
    {
      "epoch": 0.022239964416056933,
      "grad_norm": 77.69287109375,
      "learning_rate": 4.444408889457769e-06,
      "loss": 4.2383,
      "step": 13900
    },
    {
      "epoch": 0.022399964160057344,
      "grad_norm": 82.83360290527344,
      "learning_rate": 4.4764083774659615e-06,
      "loss": 4.1654,
      "step": 14000
    },
    {
      "epoch": 0.022559963904057755,
      "grad_norm": 47.373531341552734,
      "learning_rate": 4.508407865474153e-06,
      "loss": 4.158,
      "step": 14100
    },
    {
      "epoch": 0.022719963648058162,
      "grad_norm": 97.64757537841797,
      "learning_rate": 4.540407353482344e-06,
      "loss": 4.1299,
      "step": 14200
    },
    {
      "epoch": 0.022879963392058573,
      "grad_norm": 54.75618362426758,
      "learning_rate": 4.5724068414905365e-06,
      "loss": 4.1902,
      "step": 14300
    },
    {
      "epoch": 0.023039963136058984,
      "grad_norm": 258.4887390136719,
      "learning_rate": 4.604406329498729e-06,
      "loss": 3.7853,
      "step": 14400
    },
    {
      "epoch": 0.02319996288005939,
      "grad_norm": 104.63798522949219,
      "learning_rate": 4.63640581750692e-06,
      "loss": 4.0514,
      "step": 14500
    },
    {
      "epoch": 0.023359962624059802,
      "grad_norm": 60.090843200683594,
      "learning_rate": 4.668405305515112e-06,
      "loss": 4.1655,
      "step": 14600
    },
    {
      "epoch": 0.023519962368060213,
      "grad_norm": 44.36670684814453,
      "learning_rate": 4.7004047935233036e-06,
      "loss": 4.051,
      "step": 14700
    },
    {
      "epoch": 0.02367996211206062,
      "grad_norm": 41.61213302612305,
      "learning_rate": 4.732404281531496e-06,
      "loss": 4.078,
      "step": 14800
    },
    {
      "epoch": 0.02383996185606103,
      "grad_norm": 73.2448501586914,
      "learning_rate": 4.764403769539688e-06,
      "loss": 4.1193,
      "step": 14900
    },
    {
      "epoch": 0.023999961600061438,
      "grad_norm": 77.30301666259766,
      "learning_rate": 4.796403257547879e-06,
      "loss": 4.1536,
      "step": 15000
    },
    {
      "epoch": 0.02415996134406185,
      "grad_norm": 48.1458854675293,
      "learning_rate": 4.8284027455560715e-06,
      "loss": 3.935,
      "step": 15100
    },
    {
      "epoch": 0.02431996108806226,
      "grad_norm": 129.59295654296875,
      "learning_rate": 4.860402233564263e-06,
      "loss": 3.9535,
      "step": 15200
    },
    {
      "epoch": 0.024479960832062667,
      "grad_norm": 163.0813751220703,
      "learning_rate": 4.892401721572455e-06,
      "loss": 3.7051,
      "step": 15300
    },
    {
      "epoch": 0.024639960576063078,
      "grad_norm": 102.2786865234375,
      "learning_rate": 4.924401209580647e-06,
      "loss": 3.8329,
      "step": 15400
    },
    {
      "epoch": 0.02479996032006349,
      "grad_norm": 160.66392517089844,
      "learning_rate": 4.956400697588839e-06,
      "loss": 3.9412,
      "step": 15500
    },
    {
      "epoch": 0.024959960064063896,
      "grad_norm": 136.77218627929688,
      "learning_rate": 4.988400185597031e-06,
      "loss": 3.6668,
      "step": 15600
    },
    {
      "epoch": 0.025119959808064307,
      "grad_norm": 63.87991714477539,
      "learning_rate": 5.0200796787251404e-06,
      "loss": 3.7758,
      "step": 15700
    },
    {
      "epoch": 0.025279959552064718,
      "grad_norm": 352.977294921875,
      "learning_rate": 5.052079166733333e-06,
      "loss": 3.8805,
      "step": 15800
    },
    {
      "epoch": 0.025439959296065125,
      "grad_norm": 148.54776000976562,
      "learning_rate": 5.084078654741524e-06,
      "loss": 3.8848,
      "step": 15900
    },
    {
      "epoch": 0.025599959040065536,
      "grad_norm": 105.01113891601562,
      "learning_rate": 5.116078142749716e-06,
      "loss": 3.75,
      "step": 16000
    },
    {
      "epoch": 0.025759958784065947,
      "grad_norm": 170.62828063964844,
      "learning_rate": 5.148077630757908e-06,
      "loss": 3.5685,
      "step": 16100
    },
    {
      "epoch": 0.025919958528066354,
      "grad_norm": 164.85324096679688,
      "learning_rate": 5.180077118766101e-06,
      "loss": 3.7016,
      "step": 16200
    },
    {
      "epoch": 0.026079958272066765,
      "grad_norm": 79.85810852050781,
      "learning_rate": 5.212076606774292e-06,
      "loss": 4.0955,
      "step": 16300
    },
    {
      "epoch": 0.026239958016067173,
      "grad_norm": 109.73529815673828,
      "learning_rate": 5.244076094782484e-06,
      "loss": 3.7577,
      "step": 16400
    },
    {
      "epoch": 0.026399957760067583,
      "grad_norm": 105.98066711425781,
      "learning_rate": 5.276075582790676e-06,
      "loss": 3.7485,
      "step": 16500
    },
    {
      "epoch": 0.026559957504067994,
      "grad_norm": 71.02545166015625,
      "learning_rate": 5.3080750707988686e-06,
      "loss": 3.8263,
      "step": 16600
    },
    {
      "epoch": 0.0267199572480684,
      "grad_norm": 245.44224548339844,
      "learning_rate": 5.340074558807059e-06,
      "loss": 3.6922,
      "step": 16700
    },
    {
      "epoch": 0.026879956992068813,
      "grad_norm": 42.178157806396484,
      "learning_rate": 5.372074046815251e-06,
      "loss": 3.6568,
      "step": 16800
    },
    {
      "epoch": 0.027039956736069223,
      "grad_norm": 114.55894470214844,
      "learning_rate": 5.404073534823443e-06,
      "loss": 3.7317,
      "step": 16900
    },
    {
      "epoch": 0.02719995648006963,
      "grad_norm": 86.70626831054688,
      "learning_rate": 5.436073022831635e-06,
      "loss": 3.5089,
      "step": 17000
    },
    {
      "epoch": 0.02735995622407004,
      "grad_norm": 202.02505493164062,
      "learning_rate": 5.468072510839827e-06,
      "loss": 3.7377,
      "step": 17100
    },
    {
      "epoch": 0.027519955968070452,
      "grad_norm": 114.00701141357422,
      "learning_rate": 5.500071998848019e-06,
      "loss": 3.6206,
      "step": 17200
    },
    {
      "epoch": 0.02767995571207086,
      "grad_norm": 152.38311767578125,
      "learning_rate": 5.532071486856211e-06,
      "loss": 3.3702,
      "step": 17300
    },
    {
      "epoch": 0.02783995545607127,
      "grad_norm": 156.1048126220703,
      "learning_rate": 5.564070974864403e-06,
      "loss": 3.5126,
      "step": 17400
    },
    {
      "epoch": 0.02799995520007168,
      "grad_norm": 117.87386322021484,
      "learning_rate": 5.596070462872595e-06,
      "loss": 3.4841,
      "step": 17500
    },
    {
      "epoch": 0.02815995494407209,
      "grad_norm": 616.7991333007812,
      "learning_rate": 5.628069950880786e-06,
      "loss": 3.1464,
      "step": 17600
    },
    {
      "epoch": 0.0283199546880725,
      "grad_norm": 131.32760620117188,
      "learning_rate": 5.6600694388889786e-06,
      "loss": 3.7012,
      "step": 17700
    },
    {
      "epoch": 0.028479954432072907,
      "grad_norm": 60.172969818115234,
      "learning_rate": 5.69206892689717e-06,
      "loss": 3.5802,
      "step": 17800
    },
    {
      "epoch": 0.028639954176073318,
      "grad_norm": 169.24374389648438,
      "learning_rate": 5.724068414905361e-06,
      "loss": 3.4952,
      "step": 17900
    },
    {
      "epoch": 0.02879995392007373,
      "grad_norm": 158.77391052246094,
      "learning_rate": 5.7560679029135535e-06,
      "loss": 3.1174,
      "step": 18000
    },
    {
      "epoch": 0.028959953664074136,
      "grad_norm": 218.98867797851562,
      "learning_rate": 5.787747396041664e-06,
      "loss": 3.3134,
      "step": 18100
    },
    {
      "epoch": 0.029119953408074547,
      "grad_norm": 185.3249053955078,
      "learning_rate": 5.819746884049856e-06,
      "loss": 3.3578,
      "step": 18200
    },
    {
      "epoch": 0.029279953152074958,
      "grad_norm": 93.69242858886719,
      "learning_rate": 5.851746372058048e-06,
      "loss": 3.0209,
      "step": 18300
    },
    {
      "epoch": 0.029439952896075365,
      "grad_norm": 85.82784271240234,
      "learning_rate": 5.883745860066239e-06,
      "loss": 3.3796,
      "step": 18400
    },
    {
      "epoch": 0.029599952640075776,
      "grad_norm": 125.96697998046875,
      "learning_rate": 5.915745348074431e-06,
      "loss": 3.2287,
      "step": 18500
    },
    {
      "epoch": 0.029759952384076187,
      "grad_norm": 235.71075439453125,
      "learning_rate": 5.947744836082623e-06,
      "loss": 3.1537,
      "step": 18600
    },
    {
      "epoch": 0.029919952128076594,
      "grad_norm": 139.5558319091797,
      "learning_rate": 5.979744324090815e-06,
      "loss": 2.9073,
      "step": 18700
    },
    {
      "epoch": 0.030079951872077005,
      "grad_norm": 204.2928924560547,
      "learning_rate": 6.011743812099007e-06,
      "loss": 3.3444,
      "step": 18800
    },
    {
      "epoch": 0.030239951616077416,
      "grad_norm": 165.4457244873047,
      "learning_rate": 6.043743300107199e-06,
      "loss": 3.1341,
      "step": 18900
    },
    {
      "epoch": 0.030399951360077823,
      "grad_norm": 66.5983657836914,
      "learning_rate": 6.07574278811539e-06,
      "loss": 2.8862,
      "step": 19000
    },
    {
      "epoch": 0.030559951104078234,
      "grad_norm": 219.95774841308594,
      "learning_rate": 6.1077422761235826e-06,
      "loss": 3.2033,
      "step": 19100
    },
    {
      "epoch": 0.03071995084807864,
      "grad_norm": 125.15766906738281,
      "learning_rate": 6.139741764131775e-06,
      "loss": 3.2764,
      "step": 19200
    },
    {
      "epoch": 0.030879950592079052,
      "grad_norm": 207.95970153808594,
      "learning_rate": 6.171741252139967e-06,
      "loss": 3.0725,
      "step": 19300
    },
    {
      "epoch": 0.031039950336079463,
      "grad_norm": 368.32781982421875,
      "learning_rate": 6.203740740148158e-06,
      "loss": 3.0436,
      "step": 19400
    },
    {
      "epoch": 0.03119995008007987,
      "grad_norm": 412.2764587402344,
      "learning_rate": 6.23574022815635e-06,
      "loss": 3.3493,
      "step": 19500
    },
    {
      "epoch": 0.03135994982408028,
      "grad_norm": 155.46766662597656,
      "learning_rate": 6.267739716164542e-06,
      "loss": 3.0141,
      "step": 19600
    },
    {
      "epoch": 0.03151994956808069,
      "grad_norm": 89.32569885253906,
      "learning_rate": 6.299739204172733e-06,
      "loss": 2.779,
      "step": 19700
    },
    {
      "epoch": 0.0316799493120811,
      "grad_norm": 241.4378204345703,
      "learning_rate": 6.3317386921809254e-06,
      "loss": 3.3543,
      "step": 19800
    },
    {
      "epoch": 0.03183994905608151,
      "grad_norm": 13.20569133758545,
      "learning_rate": 6.363738180189118e-06,
      "loss": 3.1526,
      "step": 19900
    },
    {
      "epoch": 0.03199994880008192,
      "grad_norm": 270.6402893066406,
      "learning_rate": 6.395737668197309e-06,
      "loss": 2.7896,
      "step": 20000
    },
    {
      "epoch": 0.03215994854408233,
      "grad_norm": 106.38632202148438,
      "learning_rate": 6.427737156205501e-06,
      "loss": 2.9398,
      "step": 20100
    },
    {
      "epoch": 0.03231994828808274,
      "grad_norm": 191.7210693359375,
      "learning_rate": 6.459416649333611e-06,
      "loss": 3.1254,
      "step": 20200
    },
    {
      "epoch": 0.03247994803208315,
      "grad_norm": 143.96151733398438,
      "learning_rate": 6.491416137341803e-06,
      "loss": 2.8832,
      "step": 20300
    },
    {
      "epoch": 0.03263994777608356,
      "grad_norm": 150.26368713378906,
      "learning_rate": 6.523415625349994e-06,
      "loss": 3.0542,
      "step": 20400
    },
    {
      "epoch": 0.032799947520083965,
      "grad_norm": 178.11705017089844,
      "learning_rate": 6.5554151133581865e-06,
      "loss": 2.9722,
      "step": 20500
    },
    {
      "epoch": 0.032959947264084376,
      "grad_norm": 222.4794921875,
      "learning_rate": 6.587414601366379e-06,
      "loss": 2.9321,
      "step": 20600
    },
    {
      "epoch": 0.03311994700808479,
      "grad_norm": 155.37796020507812,
      "learning_rate": 6.619414089374571e-06,
      "loss": 2.6448,
      "step": 20700
    },
    {
      "epoch": 0.0332799467520852,
      "grad_norm": 155.5786590576172,
      "learning_rate": 6.651413577382762e-06,
      "loss": 3.4006,
      "step": 20800
    },
    {
      "epoch": 0.03343994649608561,
      "grad_norm": 684.525146484375,
      "learning_rate": 6.6834130653909545e-06,
      "loss": 3.0022,
      "step": 20900
    },
    {
      "epoch": 0.03359994624008602,
      "grad_norm": 545.5623168945312,
      "learning_rate": 6.715412553399147e-06,
      "loss": 2.6366,
      "step": 21000
    },
    {
      "epoch": 0.03375994598408642,
      "grad_norm": 292.9093017578125,
      "learning_rate": 6.747412041407339e-06,
      "loss": 3.0112,
      "step": 21100
    },
    {
      "epoch": 0.033919945728086834,
      "grad_norm": 2.531680107116699,
      "learning_rate": 6.7794115294155294e-06,
      "loss": 2.7856,
      "step": 21200
    },
    {
      "epoch": 0.034079945472087245,
      "grad_norm": 216.7860565185547,
      "learning_rate": 6.811411017423722e-06,
      "loss": 3.0967,
      "step": 21300
    },
    {
      "epoch": 0.034239945216087656,
      "grad_norm": 138.73028564453125,
      "learning_rate": 6.843410505431913e-06,
      "loss": 2.8754,
      "step": 21400
    },
    {
      "epoch": 0.034399944960088066,
      "grad_norm": 78.2362060546875,
      "learning_rate": 6.875409993440105e-06,
      "loss": 3.1269,
      "step": 21500
    },
    {
      "epoch": 0.03455994470408847,
      "grad_norm": 144.1228790283203,
      "learning_rate": 6.907409481448297e-06,
      "loss": 2.8235,
      "step": 21600
    },
    {
      "epoch": 0.03471994444808888,
      "grad_norm": 275.1159973144531,
      "learning_rate": 6.93940896945649e-06,
      "loss": 2.4912,
      "step": 21700
    },
    {
      "epoch": 0.03487994419208929,
      "grad_norm": 216.12060546875,
      "learning_rate": 6.971408457464681e-06,
      "loss": 2.5079,
      "step": 21800
    },
    {
      "epoch": 0.0350399439360897,
      "grad_norm": 398.5049133300781,
      "learning_rate": 7.003407945472873e-06,
      "loss": 3.2942,
      "step": 21900
    },
    {
      "epoch": 0.035199943680090114,
      "grad_norm": 116.13761901855469,
      "learning_rate": 7.035407433481065e-06,
      "loss": 2.4184,
      "step": 22000
    },
    {
      "epoch": 0.035359943424090524,
      "grad_norm": 425.1556091308594,
      "learning_rate": 7.067406921489257e-06,
      "loss": 2.782,
      "step": 22100
    },
    {
      "epoch": 0.03551994316809093,
      "grad_norm": 17.029335021972656,
      "learning_rate": 7.099086414617366e-06,
      "loss": 2.7652,
      "step": 22200
    },
    {
      "epoch": 0.03567994291209134,
      "grad_norm": 307.45343017578125,
      "learning_rate": 7.1310859026255585e-06,
      "loss": 3.113,
      "step": 22300
    },
    {
      "epoch": 0.03583994265609175,
      "grad_norm": 69.89311981201172,
      "learning_rate": 7.163085390633751e-06,
      "loss": 2.7451,
      "step": 22400
    },
    {
      "epoch": 0.03599994240009216,
      "grad_norm": 28.0865535736084,
      "learning_rate": 7.195084878641943e-06,
      "loss": 2.7473,
      "step": 22500
    },
    {
      "epoch": 0.03615994214409257,
      "grad_norm": 108.03202056884766,
      "learning_rate": 7.227084366650134e-06,
      "loss": 2.5116,
      "step": 22600
    },
    {
      "epoch": 0.03631994188809298,
      "grad_norm": 299.888427734375,
      "learning_rate": 7.2590838546583265e-06,
      "loss": 2.8531,
      "step": 22700
    },
    {
      "epoch": 0.036479941632093386,
      "grad_norm": 87.79664611816406,
      "learning_rate": 7.291083342666519e-06,
      "loss": 2.9171,
      "step": 22800
    },
    {
      "epoch": 0.0366399413760938,
      "grad_norm": 388.6871337890625,
      "learning_rate": 7.323082830674709e-06,
      "loss": 2.7954,
      "step": 22900
    },
    {
      "epoch": 0.03679994112009421,
      "grad_norm": 87.27410888671875,
      "learning_rate": 7.355082318682901e-06,
      "loss": 2.5376,
      "step": 23000
    },
    {
      "epoch": 0.03695994086409462,
      "grad_norm": 159.74534606933594,
      "learning_rate": 7.387081806691094e-06,
      "loss": 3.2488,
      "step": 23100
    },
    {
      "epoch": 0.03711994060809503,
      "grad_norm": 169.96243286132812,
      "learning_rate": 7.419081294699285e-06,
      "loss": 2.6131,
      "step": 23200
    },
    {
      "epoch": 0.037279940352095434,
      "grad_norm": 221.1896514892578,
      "learning_rate": 7.451080782707477e-06,
      "loss": 3.1343,
      "step": 23300
    },
    {
      "epoch": 0.037439940096095845,
      "grad_norm": 67.28482818603516,
      "learning_rate": 7.482760275835588e-06,
      "loss": 2.3159,
      "step": 23400
    },
    {
      "epoch": 0.037599939840096255,
      "grad_norm": 341.05975341796875,
      "learning_rate": 7.514759763843779e-06,
      "loss": 2.4225,
      "step": 23500
    },
    {
      "epoch": 0.037759939584096666,
      "grad_norm": 250.44683837890625,
      "learning_rate": 7.54675925185197e-06,
      "loss": 2.5034,
      "step": 23600
    },
    {
      "epoch": 0.03791993932809708,
      "grad_norm": 423.6518249511719,
      "learning_rate": 7.5787587398601625e-06,
      "loss": 3.0067,
      "step": 23700
    },
    {
      "epoch": 0.03807993907209749,
      "grad_norm": 169.45944213867188,
      "learning_rate": 7.610758227868355e-06,
      "loss": 2.313,
      "step": 23800
    },
    {
      "epoch": 0.03823993881609789,
      "grad_norm": 80.43399047851562,
      "learning_rate": 7.642757715876546e-06,
      "loss": 2.5363,
      "step": 23900
    },
    {
      "epoch": 0.0383999385600983,
      "grad_norm": 248.08848571777344,
      "learning_rate": 7.674757203884739e-06,
      "loss": 2.7929,
      "step": 24000
    },
    {
      "epoch": 0.03855993830409871,
      "grad_norm": 3.7647440433502197,
      "learning_rate": 7.70675669189293e-06,
      "loss": 2.617,
      "step": 24100
    },
    {
      "epoch": 0.038719938048099124,
      "grad_norm": 3.100020170211792,
      "learning_rate": 7.738756179901122e-06,
      "loss": 2.9711,
      "step": 24200
    },
    {
      "epoch": 0.038879937792099535,
      "grad_norm": 69.79640197753906,
      "learning_rate": 7.770755667909315e-06,
      "loss": 2.7726,
      "step": 24300
    },
    {
      "epoch": 0.03903993753609994,
      "grad_norm": 190.2179412841797,
      "learning_rate": 7.802755155917506e-06,
      "loss": 2.5849,
      "step": 24400
    },
    {
      "epoch": 0.03919993728010035,
      "grad_norm": 75.47491455078125,
      "learning_rate": 7.834754643925698e-06,
      "loss": 2.3231,
      "step": 24500
    },
    {
      "epoch": 0.03935993702410076,
      "grad_norm": 13.3529691696167,
      "learning_rate": 7.866754131933889e-06,
      "loss": 2.2477,
      "step": 24600
    },
    {
      "epoch": 0.03951993676810117,
      "grad_norm": 280.162109375,
      "learning_rate": 7.89875361994208e-06,
      "loss": 2.5487,
      "step": 24700
    },
    {
      "epoch": 0.03967993651210158,
      "grad_norm": 376.9624938964844,
      "learning_rate": 7.930753107950273e-06,
      "loss": 2.5175,
      "step": 24800
    },
    {
      "epoch": 0.03983993625610199,
      "grad_norm": 341.099609375,
      "learning_rate": 7.962752595958465e-06,
      "loss": 2.6758,
      "step": 24900
    },
    {
      "epoch": 0.0399999360001024,
      "grad_norm": 436.5195007324219,
      "learning_rate": 7.994752083966658e-06,
      "loss": 2.7313,
      "step": 25000
    },
    {
      "epoch": 0.04015993574410281,
      "grad_norm": 274.91363525390625,
      "learning_rate": 8.026751571974849e-06,
      "loss": 2.4846,
      "step": 25100
    },
    {
      "epoch": 0.04031993548810322,
      "grad_norm": 183.5716094970703,
      "learning_rate": 8.05875105998304e-06,
      "loss": 2.8697,
      "step": 25200
    },
    {
      "epoch": 0.04047993523210363,
      "grad_norm": 70.23844909667969,
      "learning_rate": 8.090750547991234e-06,
      "loss": 2.5289,
      "step": 25300
    },
    {
      "epoch": 0.04063993497610404,
      "grad_norm": 139.3669891357422,
      "learning_rate": 8.122750035999425e-06,
      "loss": 2.235,
      "step": 25400
    },
    {
      "epoch": 0.04079993472010445,
      "grad_norm": 242.79315185546875,
      "learning_rate": 8.154429529127534e-06,
      "loss": 2.5028,
      "step": 25500
    }
  ],
  "logging_steps": 100,
  "max_steps": 625001,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
}