{ "best_metric": null, "best_model_checkpoint": null, "epoch": 0.8919350295746878, "eval_steps": 5000, "global_step": 9500, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.009388789784996713, "grad_norm": 87.77811431884766, "learning_rate": 9.009854528390429e-07, "loss": 16.2337, "step": 100 }, { "epoch": 0.018777579569993427, "grad_norm": 85.86430358886719, "learning_rate": 1.8395119662130456e-06, "loss": 13.5901, "step": 200 }, { "epoch": 0.02816636935499014, "grad_norm": 14.885255813598633, "learning_rate": 2.7592679493195683e-06, "loss": 9.8565, "step": 300 }, { "epoch": 0.03755515913998685, "grad_norm": 6.9691972732543945, "learning_rate": 3.6977944626935713e-06, "loss": 8.3332, "step": 400 }, { "epoch": 0.04694394892498357, "grad_norm": 5.612818241119385, "learning_rate": 4.6363209760675744e-06, "loss": 8.1261, "step": 500 }, { "epoch": 0.05633273870998028, "grad_norm": 4.705409526824951, "learning_rate": 5.574847489441577e-06, "loss": 8.0697, "step": 600 }, { "epoch": 0.06572152849497699, "grad_norm": 4.337332725524902, "learning_rate": 6.51337400281558e-06, "loss": 8.0298, "step": 700 }, { "epoch": 0.0751103182799737, "grad_norm": 3.6314213275909424, "learning_rate": 7.451900516189583e-06, "loss": 8.033, "step": 800 }, { "epoch": 0.08449910806497042, "grad_norm": 3.4845075607299805, "learning_rate": 8.390427029563585e-06, "loss": 7.9858, "step": 900 }, { "epoch": 0.09388789784996714, "grad_norm": 5.188210487365723, "learning_rate": 9.328953542937589e-06, "loss": 8.012, "step": 1000 }, { "epoch": 0.10327668763496385, "grad_norm": 3.0830442905426025, "learning_rate": 1.0267480056311592e-05, "loss": 7.9745, "step": 1100 }, { "epoch": 0.11266547741996057, "grad_norm": 3.4729278087615967, "learning_rate": 1.1206006569685594e-05, "loss": 8.0091, "step": 1200 }, { "epoch": 0.12205426720495728, "grad_norm": 2.329235076904297, "learning_rate": 1.2144533083059597e-05, "loss": 8.0221, "step": 1300 }, { "epoch": 0.13144305698995398, "grad_norm": 2.7225279808044434, "learning_rate": 1.3083059596433601e-05, "loss": 7.9583, "step": 1400 }, { "epoch": 0.1408318467749507, "grad_norm": 2.012805938720703, "learning_rate": 1.4021586109807603e-05, "loss": 8.0031, "step": 1500 }, { "epoch": 0.1502206365599474, "grad_norm": 2.9397523403167725, "learning_rate": 1.4960112623181606e-05, "loss": 7.9985, "step": 1600 }, { "epoch": 0.15960942634494413, "grad_norm": 2.356337308883667, "learning_rate": 1.589863913655561e-05, "loss": 7.9647, "step": 1700 }, { "epoch": 0.16899821612994084, "grad_norm": 2.6846818923950195, "learning_rate": 1.6837165649929613e-05, "loss": 7.9857, "step": 1800 }, { "epoch": 0.17838700591493756, "grad_norm": 2.0188565254211426, "learning_rate": 1.7775692163303613e-05, "loss": 7.9806, "step": 1900 }, { "epoch": 0.18777579569993427, "grad_norm": 4.030488014221191, "learning_rate": 1.8714218676677617e-05, "loss": 7.9761, "step": 2000 }, { "epoch": 0.197164585484931, "grad_norm": 4.183101654052734, "learning_rate": 1.965274519005162e-05, "loss": 7.9696, "step": 2100 }, { "epoch": 0.2065533752699277, "grad_norm": 1.4769889116287231, "learning_rate": 1.9934275728965626e-05, "loss": 8.0014, "step": 2200 }, { "epoch": 0.21594216505492442, "grad_norm": 2.1914358139038086, "learning_rate": 1.9829951489228525e-05, "loss": 7.9546, "step": 2300 }, { "epoch": 0.22533095483992113, "grad_norm": 22.55516815185547, "learning_rate": 1.972562724949142e-05, "loss": 7.9874, "step": 2400 }, { "epoch": 0.23471974462491785, 
"grad_norm": 1.635116457939148, "learning_rate": 1.962130300975432e-05, "loss": 7.9846, "step": 2500 }, { "epoch": 0.24410853440991456, "grad_norm": 5.707275390625, "learning_rate": 1.9516978770017215e-05, "loss": 7.9664, "step": 2600 }, { "epoch": 0.2534973241949113, "grad_norm": 4.194604396820068, "learning_rate": 1.9412654530280113e-05, "loss": 7.9725, "step": 2700 }, { "epoch": 0.26288611397990796, "grad_norm": 2.0074055194854736, "learning_rate": 1.930833029054301e-05, "loss": 7.9419, "step": 2800 }, { "epoch": 0.2722749037649047, "grad_norm": 26.4300479888916, "learning_rate": 1.9204006050805904e-05, "loss": 7.9786, "step": 2900 }, { "epoch": 0.2816636935499014, "grad_norm": 2.5870931148529053, "learning_rate": 1.9099681811068803e-05, "loss": 7.9479, "step": 3000 }, { "epoch": 0.29105248333489814, "grad_norm": 1.6209933757781982, "learning_rate": 1.8995357571331702e-05, "loss": 7.9526, "step": 3100 }, { "epoch": 0.3004412731198948, "grad_norm": 19.398080825805664, "learning_rate": 1.8891033331594598e-05, "loss": 7.9613, "step": 3200 }, { "epoch": 0.30983006290489157, "grad_norm": 2.124729871749878, "learning_rate": 1.8786709091857496e-05, "loss": 7.9994, "step": 3300 }, { "epoch": 0.31921885268988826, "grad_norm": 2.701019763946533, "learning_rate": 1.8682384852120392e-05, "loss": 7.9464, "step": 3400 }, { "epoch": 0.328607642474885, "grad_norm": 1.944600224494934, "learning_rate": 1.8578060612383287e-05, "loss": 7.9429, "step": 3500 }, { "epoch": 0.3379964322598817, "grad_norm": 41.85493087768555, "learning_rate": 1.8473736372646186e-05, "loss": 7.9539, "step": 3600 }, { "epoch": 0.34738522204487843, "grad_norm": 2.024019479751587, "learning_rate": 1.8369412132909085e-05, "loss": 7.9699, "step": 3700 }, { "epoch": 0.3567740118298751, "grad_norm": 2.2833781242370605, "learning_rate": 1.826508789317198e-05, "loss": 7.9144, "step": 3800 }, { "epoch": 0.36616280161487186, "grad_norm": 3.1666574478149414, "learning_rate": 1.8160763653434876e-05, "loss": 7.9424, "step": 3900 }, { "epoch": 0.37555159139986855, "grad_norm": 1.7259443998336792, "learning_rate": 1.8056439413697775e-05, "loss": 7.9361, "step": 4000 }, { "epoch": 0.3849403811848653, "grad_norm": 2.5588905811309814, "learning_rate": 1.795211517396067e-05, "loss": 7.9144, "step": 4100 }, { "epoch": 0.394329170969862, "grad_norm": 2.542963743209839, "learning_rate": 1.784779093422357e-05, "loss": 7.907, "step": 4200 }, { "epoch": 0.4037179607548587, "grad_norm": 2.755725622177124, "learning_rate": 1.7743466694486468e-05, "loss": 7.9049, "step": 4300 }, { "epoch": 0.4131067505398554, "grad_norm": 2.6067683696746826, "learning_rate": 1.7639142454749364e-05, "loss": 7.939, "step": 4400 }, { "epoch": 0.42249554032485215, "grad_norm": 2.614475965499878, "learning_rate": 1.753481821501226e-05, "loss": 7.9067, "step": 4500 }, { "epoch": 0.43188433010984884, "grad_norm": 2.172943353652954, "learning_rate": 1.7430493975275155e-05, "loss": 7.9149, "step": 4600 }, { "epoch": 0.4412731198948456, "grad_norm": 6.292716979980469, "learning_rate": 1.7326169735538053e-05, "loss": 7.9705, "step": 4700 }, { "epoch": 0.45066190967984227, "grad_norm": 2.595337152481079, "learning_rate": 1.7221845495800952e-05, "loss": 7.8992, "step": 4800 }, { "epoch": 0.460050699464839, "grad_norm": 5.529814720153809, "learning_rate": 1.7117521256063848e-05, "loss": 7.9077, "step": 4900 }, { "epoch": 0.4694394892498357, "grad_norm": 6.696155548095703, "learning_rate": 1.7013197016326747e-05, "loss": 7.8992, "step": 5000 }, { "epoch": 0.4694394892498357, "eval_loss": 
7.916718006134033, "eval_runtime": 334.3618, "eval_samples_per_second": 1019.336, "eval_steps_per_second": 7.964, "step": 5000 }, { "epoch": 0.4788282790348324, "grad_norm": 26.500988006591797, "learning_rate": 1.6908872776589642e-05, "loss": 7.914, "step": 5100 }, { "epoch": 0.4882170688198291, "grad_norm": 2.454939126968384, "learning_rate": 1.6804548536852537e-05, "loss": 7.8913, "step": 5200 }, { "epoch": 0.4976058586048258, "grad_norm": 1.8935959339141846, "learning_rate": 1.6700224297115436e-05, "loss": 7.8999, "step": 5300 }, { "epoch": 0.5069946483898226, "grad_norm": 2.2066667079925537, "learning_rate": 1.6595900057378335e-05, "loss": 7.8818, "step": 5400 }, { "epoch": 0.5163834381748192, "grad_norm": 2.7672908306121826, "learning_rate": 1.649157581764123e-05, "loss": 7.9383, "step": 5500 }, { "epoch": 0.5257722279598159, "grad_norm": 4.411477088928223, "learning_rate": 1.6387251577904126e-05, "loss": 7.9094, "step": 5600 }, { "epoch": 0.5351610177448127, "grad_norm": 50.536991119384766, "learning_rate": 1.6282927338167025e-05, "loss": 7.8986, "step": 5700 }, { "epoch": 0.5445498075298094, "grad_norm": 2.9657583236694336, "learning_rate": 1.617860309842992e-05, "loss": 7.9015, "step": 5800 }, { "epoch": 0.5539385973148061, "grad_norm": 1.910575270652771, "learning_rate": 1.607427885869282e-05, "loss": 7.9059, "step": 5900 }, { "epoch": 0.5633273870998028, "grad_norm": 3.9665298461914062, "learning_rate": 1.5969954618955715e-05, "loss": 7.8524, "step": 6000 }, { "epoch": 0.5727161768847996, "grad_norm": 2.0286688804626465, "learning_rate": 1.5865630379218614e-05, "loss": 7.8788, "step": 6100 }, { "epoch": 0.5821049666697963, "grad_norm": 3.200676202774048, "learning_rate": 1.576130613948151e-05, "loss": 7.8712, "step": 6200 }, { "epoch": 0.591493756454793, "grad_norm": 2.0278351306915283, "learning_rate": 1.5656981899744408e-05, "loss": 7.8967, "step": 6300 }, { "epoch": 0.6008825462397896, "grad_norm": 3.4500420093536377, "learning_rate": 1.5553700902404677e-05, "loss": 7.8677, "step": 6400 }, { "epoch": 0.6102713360247864, "grad_norm": 2.8257575035095215, "learning_rate": 1.5450419905064945e-05, "loss": 7.9132, "step": 6500 }, { "epoch": 0.6196601258097831, "grad_norm": 2.537797212600708, "learning_rate": 1.534609566532784e-05, "loss": 7.853, "step": 6600 }, { "epoch": 0.6290489155947798, "grad_norm": 3.010673999786377, "learning_rate": 1.5241771425590736e-05, "loss": 7.8968, "step": 6700 }, { "epoch": 0.6384377053797765, "grad_norm": 2.7398829460144043, "learning_rate": 1.5137447185853635e-05, "loss": 7.8656, "step": 6800 }, { "epoch": 0.6478264951647733, "grad_norm": 4.402348518371582, "learning_rate": 1.5033122946116532e-05, "loss": 7.8801, "step": 6900 }, { "epoch": 0.65721528494977, "grad_norm": 2.982344388961792, "learning_rate": 1.4928798706379428e-05, "loss": 7.8378, "step": 7000 }, { "epoch": 0.6666040747347667, "grad_norm": 12.262134552001953, "learning_rate": 1.4824474466642325e-05, "loss": 7.8554, "step": 7100 }, { "epoch": 0.6759928645197634, "grad_norm": 3.8007514476776123, "learning_rate": 1.4720150226905224e-05, "loss": 7.8305, "step": 7200 }, { "epoch": 0.6853816543047602, "grad_norm": 3.8391048908233643, "learning_rate": 1.461582598716812e-05, "loss": 7.8613, "step": 7300 }, { "epoch": 0.6947704440897569, "grad_norm": 3.107123374938965, "learning_rate": 1.4511501747431017e-05, "loss": 7.8554, "step": 7400 }, { "epoch": 0.7041592338747535, "grad_norm": 2.5167956352233887, "learning_rate": 1.4407177507693915e-05, "loss": 7.8653, "step": 7500 }, { "epoch": 
0.7135480236597502, "grad_norm": 5.4338765144348145, "learning_rate": 1.430285326795681e-05, "loss": 7.8387, "step": 7600 }, { "epoch": 0.7229368134447469, "grad_norm": 3.295238733291626, "learning_rate": 1.4198529028219708e-05, "loss": 7.8513, "step": 7700 }, { "epoch": 0.7323256032297437, "grad_norm": 3.02062726020813, "learning_rate": 1.4094204788482607e-05, "loss": 7.8496, "step": 7800 }, { "epoch": 0.7417143930147404, "grad_norm": 2.2482168674468994, "learning_rate": 1.3989880548745502e-05, "loss": 7.8276, "step": 7900 }, { "epoch": 0.7511031827997371, "grad_norm": 3.394895315170288, "learning_rate": 1.38855563090084e-05, "loss": 7.8353, "step": 8000 }, { "epoch": 0.7604919725847338, "grad_norm": 5.990811824798584, "learning_rate": 1.3781232069271295e-05, "loss": 7.8103, "step": 8100 }, { "epoch": 0.7698807623697306, "grad_norm": 6.237292289733887, "learning_rate": 1.3676907829534194e-05, "loss": 7.8622, "step": 8200 }, { "epoch": 0.7792695521547273, "grad_norm": 3.848353147506714, "learning_rate": 1.3572583589797091e-05, "loss": 7.832, "step": 8300 }, { "epoch": 0.788658341939724, "grad_norm": 3.6994402408599854, "learning_rate": 1.3468259350059986e-05, "loss": 7.8349, "step": 8400 }, { "epoch": 0.7980471317247206, "grad_norm": 2.945556163787842, "learning_rate": 1.3363935110322885e-05, "loss": 7.855, "step": 8500 }, { "epoch": 0.8074359215097174, "grad_norm": 3.0218355655670166, "learning_rate": 1.3259610870585782e-05, "loss": 7.8316, "step": 8600 }, { "epoch": 0.8168247112947141, "grad_norm": 3.8162949085235596, "learning_rate": 1.3155286630848678e-05, "loss": 7.8066, "step": 8700 }, { "epoch": 0.8262135010797108, "grad_norm": 4.187964916229248, "learning_rate": 1.3050962391111577e-05, "loss": 7.8166, "step": 8800 }, { "epoch": 0.8356022908647075, "grad_norm": 3.577139139175415, "learning_rate": 1.2946638151374474e-05, "loss": 7.8588, "step": 8900 }, { "epoch": 0.8449910806497043, "grad_norm": 2.923499345779419, "learning_rate": 1.284231391163737e-05, "loss": 7.8042, "step": 9000 }, { "epoch": 0.854379870434701, "grad_norm": 6.15308952331543, "learning_rate": 1.2737989671900267e-05, "loss": 7.8431, "step": 9100 }, { "epoch": 0.8637686602196977, "grad_norm": 5.1844482421875, "learning_rate": 1.2633665432163165e-05, "loss": 7.7947, "step": 9200 }, { "epoch": 0.8731574500046944, "grad_norm": 3.3482980728149414, "learning_rate": 1.2530384434823432e-05, "loss": 7.8175, "step": 9300 }, { "epoch": 0.8825462397896912, "grad_norm": 3.3958215713500977, "learning_rate": 1.242606019508633e-05, "loss": 7.8299, "step": 9400 }, { "epoch": 0.8919350295746878, "grad_norm": 37.65146255493164, "learning_rate": 1.2321735955349225e-05, "loss": 7.8455, "step": 9500 } ], "logging_steps": 100, "max_steps": 21302, "num_input_tokens_seen": 0, "num_train_epochs": 2, "save_steps": 500, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 0.0, "train_batch_size": 128, "trial_name": null, "trial_params": null }
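
The JSON above has the structure of a Hugging Face Trainer checkpoint state: a `log_history` list of per-step training records (each with `epoch`, `step`, `loss`, `grad_norm`, `learning_rate`) plus periodic evaluation records keyed by `eval_loss`, alongside run-level fields such as `max_steps` and `train_batch_size`. Below is a minimal sketch of how one might inspect it, assuming the file is saved under the usual name `trainer_state.json` (the path and output filename here are assumptions, not part of the original state).

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state shown above. The filename "trainer_state.json" is an
# assumption: it is the name the Hugging Face Trainer normally writes inside a
# checkpoint directory, so adjust the path to wherever this file actually lives.
with open("trainer_state.json") as f:
    state = json.load(f)

# Split log_history into training entries (which carry "loss") and evaluation
# entries (which carry "eval_loss"); both kinds record the global "step".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

plt.plot([e["step"] for e in train_logs],
         [e["loss"] for e in train_logs],
         label="train loss")
if eval_logs:
    plt.scatter([e["step"] for e in eval_logs],
                [e["eval_loss"] for e in eval_logs],
                color="red", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curve.png")  # output name chosen for illustration
```

For this particular state, such a plot would show the training loss falling from roughly 16.2 at step 100 to about 7.8 by step 9,500 (of `max_steps` 21,302), with a single evaluation point (`eval_loss` ≈ 7.92) logged at step 5,000.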