{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.002, "grad_norm": 226.99563598632812, "learning_rate": 6.666666666666668e-08, "loss": 21.9448, "step": 1},
    {"epoch": 0.004, "grad_norm": 236.27398681640625, "learning_rate": 1.3333333333333336e-07, "loss": 22.6687, "step": 2},
    {"epoch": 0.006, "grad_norm": 243.7906036376953, "learning_rate": 2.0000000000000002e-07, "loss": 23.7182, "step": 3},
    {"epoch": 0.008, "grad_norm": 233.99176025390625, "learning_rate": 2.666666666666667e-07, "loss": 22.4062, "step": 4},
    {"epoch": 0.01, "grad_norm": 247.73858642578125, "learning_rate": 3.3333333333333335e-07, "loss": 23.4446, "step": 5},
    {"epoch": 0.012, "grad_norm": 224.78480529785156, "learning_rate": 4.0000000000000003e-07, "loss": 22.0816, "step": 6},
    {"epoch": 0.014, "grad_norm": 239.08424377441406, "learning_rate": 4.666666666666667e-07, "loss": 22.2544, "step": 7},
    {"epoch": 0.016, "grad_norm": 243.3548126220703, "learning_rate": 5.333333333333335e-07, "loss": 22.3676, "step": 8},
    {"epoch": 0.018, "grad_norm": 228.88418579101562, "learning_rate": 6.000000000000001e-07, "loss": 21.8324, "step": 9},
    {"epoch": 0.02, "grad_norm": 255.19561767578125, "learning_rate": 6.666666666666667e-07, "loss": 21.7817, "step": 10},
    {"epoch": 0.022, "grad_norm": 256.03631591796875, "learning_rate": 7.333333333333334e-07, "loss": 22.0901, "step": 11},
    {"epoch": 0.024, "grad_norm": 256.8511047363281, "learning_rate": 8.000000000000001e-07, "loss": 22.1082, "step": 12},
    {"epoch": 0.026, "grad_norm": 276.2884521484375, "learning_rate": 8.666666666666668e-07, "loss": 20.2445, "step": 13},
    {"epoch": 0.028, "grad_norm": 274.6839599609375, "learning_rate": 9.333333333333334e-07, "loss": 18.7186, "step": 14},
    {"epoch": 0.03, "grad_norm": 282.9738464355469, "learning_rate": 1.0000000000000002e-06, "loss": 19.2151, "step": 15},
    {"epoch": 0.032, "grad_norm": 273.26934814453125, "learning_rate": 1.066666666666667e-06, "loss": 18.8715, "step": 16},
    {"epoch": 0.034, "grad_norm": 222.71524047851562, "learning_rate": 1.1333333333333334e-06, "loss": 14.6642, "step": 17},
    {"epoch": 0.036, "grad_norm": 218.20606994628906, "learning_rate": 1.2000000000000002e-06, "loss": 13.549, "step": 18},
    {"epoch": 0.038, "grad_norm": 229.51235961914062, "learning_rate": 1.2666666666666669e-06, "loss": 13.3927, "step": 19},
    {"epoch": 0.04, "grad_norm": 228.68344116210938, "learning_rate": 1.3333333333333334e-06, "loss": 12.2413, "step": 20},
    {"epoch": 0.042, "grad_norm": 222.99270629882812, "learning_rate": 1.4000000000000001e-06, "loss": 11.1357, "step": 21},
    {"epoch": 0.044, "grad_norm": 236.81912231445312, "learning_rate": 1.4666666666666669e-06, "loss": 10.6309, "step": 22},
    {"epoch": 0.046, "grad_norm": 253.3773193359375, "learning_rate": 1.5333333333333334e-06, "loss": 6.1736, "step": 23},
    {"epoch": 0.048, "grad_norm": 132.65660095214844, "learning_rate": 1.6000000000000001e-06, "loss": 3.7401, "step": 24},
    {"epoch": 0.05, "grad_norm": 171.21932983398438, "learning_rate": 1.6666666666666667e-06, "loss": 3.2601, "step": 25},
    {"epoch": 0.052, "grad_norm": 112.79869079589844, "learning_rate": 1.7333333333333336e-06, "loss": 2.6378, "step": 26},
    {"epoch": 0.054, "grad_norm": 72.08856201171875, "learning_rate": 1.8000000000000001e-06, "loss": 2.2122, "step": 27},
    {"epoch": 0.056, "grad_norm": 47.903202056884766, "learning_rate": 1.8666666666666669e-06, "loss": 1.9545, "step": 28},
    {"epoch": 0.058, "grad_norm": 42.68925094604492, "learning_rate": 1.9333333333333336e-06, "loss": 1.909, "step": 29},
    {"epoch": 0.06, "grad_norm": 37.55009460449219, "learning_rate": 2.0000000000000003e-06, "loss": 1.7777, "step": 30},
    {"epoch": 0.062, "grad_norm": 27.396568298339844, "learning_rate": 2.0666666666666666e-06, "loss": 1.8359, "step": 31},
    {"epoch": 0.064, "grad_norm": 30.148645401000977, "learning_rate": 2.133333333333334e-06, "loss": 1.7342, "step": 32},
    {"epoch": 0.066, "grad_norm": 28.60604476928711, "learning_rate": 2.2e-06, "loss": 1.8821, "step": 33},
    {"epoch": 0.068, "grad_norm": 22.30388641357422, "learning_rate": 2.266666666666667e-06, "loss": 1.6301, "step": 34},
    {"epoch": 0.07, "grad_norm": 28.496145248413086, "learning_rate": 2.3333333333333336e-06, "loss": 1.8361, "step": 35},
    {"epoch": 0.072, "grad_norm": 34.518287658691406, "learning_rate": 2.4000000000000003e-06, "loss": 1.6929, "step": 36},
    {"epoch": 0.074, "grad_norm": 23.480363845825195, "learning_rate": 2.466666666666667e-06, "loss": 1.5912, "step": 37},
    {"epoch": 0.076, "grad_norm": 26.371000289916992, "learning_rate": 2.5333333333333338e-06, "loss": 1.6452, "step": 38},
    {"epoch": 0.078, "grad_norm": 26.6358585357666, "learning_rate": 2.6e-06, "loss": 1.7357, "step": 39},
    {"epoch": 0.08, "grad_norm": 21.163368225097656, "learning_rate": 2.666666666666667e-06, "loss": 1.8135, "step": 40},
    {"epoch": 0.082, "grad_norm": 38.06758499145508, "learning_rate": 2.7333333333333336e-06, "loss": 1.7483, "step": 41},
    {"epoch": 0.084, "grad_norm": 20.108041763305664, "learning_rate": 2.8000000000000003e-06, "loss": 1.7272, "step": 42},
    {"epoch": 0.086, "grad_norm": 37.66496658325195, "learning_rate": 2.866666666666667e-06, "loss": 1.8263, "step": 43},
    {"epoch": 0.088, "grad_norm": 17.51531219482422, "learning_rate": 2.9333333333333338e-06, "loss": 1.7223, "step": 44},
    {"epoch": 0.09, "grad_norm": 22.1541748046875, "learning_rate": 3e-06, "loss": 1.6006, "step": 45},
    {"epoch": 0.092, "grad_norm": 19.2305908203125, "learning_rate": 3.066666666666667e-06, "loss": 1.6562, "step": 46},
    {"epoch": 0.094, "grad_norm": 16.655160903930664, "learning_rate": 3.133333333333334e-06, "loss": 1.7491, "step": 47},
    {"epoch": 0.096, "grad_norm": 20.247623443603516, "learning_rate": 3.2000000000000003e-06, "loss": 1.6449, "step": 48},
    {"epoch": 0.098, "grad_norm": 17.47651481628418, "learning_rate": 3.266666666666667e-06, "loss": 1.6761, "step": 49},
    {"epoch": 0.1, "grad_norm": 26.51736068725586, "learning_rate": 3.3333333333333333e-06, "loss": 1.7986, "step": 50},
    {"epoch": 0.102, "grad_norm": 21.225101470947266, "learning_rate": 3.4000000000000005e-06, "loss": 1.6329, "step": 51},
    {"epoch": 0.104, "grad_norm": 22.989187240600586, "learning_rate": 3.4666666666666672e-06, "loss": 1.7155, "step": 52},
    {"epoch": 0.106, "grad_norm": 18.516536712646484, "learning_rate": 3.5333333333333335e-06, "loss": 1.6831, "step": 53},
    {"epoch": 0.108, "grad_norm": 19.069442749023438, "learning_rate": 3.6000000000000003e-06, "loss": 1.6225, "step": 54},
    {"epoch": 0.11, "grad_norm": 20.60556983947754, "learning_rate": 3.6666666666666666e-06, "loss": 1.6726, "step": 55},
    {"epoch": 0.112, "grad_norm": 19.45859146118164, "learning_rate": 3.7333333333333337e-06, "loss": 1.6408, "step": 56},
    {"epoch": 0.114, "grad_norm": 26.011234283447266, "learning_rate": 3.8000000000000005e-06, "loss": 1.6057, "step": 57},
    {"epoch": 0.116, "grad_norm": 17.240083694458008, "learning_rate": 3.866666666666667e-06, "loss": 1.677, "step": 58},
    {"epoch": 0.118, "grad_norm": 22.089248657226562, "learning_rate": 3.9333333333333335e-06, "loss": 1.676, "step": 59},
    {"epoch": 0.12, "grad_norm": 33.735233306884766, "learning_rate": 4.000000000000001e-06, "loss": 1.6742, "step": 60},
    {"epoch": 0.122, "grad_norm": 27.743314743041992, "learning_rate": 4.066666666666667e-06, "loss": 1.6956, "step": 61},
    {"epoch": 0.124, "grad_norm": 25.890151977539062, "learning_rate": 4.133333333333333e-06, "loss": 1.56, "step": 62},
    {"epoch": 0.126, "grad_norm": 16.895370483398438, "learning_rate": 4.2000000000000004e-06, "loss": 1.5794, "step": 63},
    {"epoch": 0.128, "grad_norm": 19.82956314086914, "learning_rate": 4.266666666666668e-06, "loss": 1.4879, "step": 64},
    {"epoch": 0.13, "grad_norm": 15.571148872375488, "learning_rate": 4.333333333333334e-06, "loss": 1.7724, "step": 65},
    {"epoch": 0.132, "grad_norm": 14.684285163879395, "learning_rate": 4.4e-06, "loss": 1.6943, "step": 66},
    {"epoch": 0.134, "grad_norm": 13.911795616149902, "learning_rate": 4.4666666666666665e-06, "loss": 1.6627, "step": 67},
    {"epoch": 0.136, "grad_norm": 15.752901077270508, "learning_rate": 4.533333333333334e-06, "loss": 1.7536, "step": 68},
    {"epoch": 0.138, "grad_norm": 24.22060203552246, "learning_rate": 4.600000000000001e-06, "loss": 1.6225, "step": 69},
    {"epoch": 0.14, "grad_norm": 13.675880432128906, "learning_rate": 4.666666666666667e-06, "loss": 1.6696, "step": 70},
    {"epoch": 0.142, "grad_norm": 20.69804573059082, "learning_rate": 4.7333333333333335e-06, "loss": 1.6295, "step": 71},
    {"epoch": 0.144, "grad_norm": 28.758134841918945, "learning_rate": 4.800000000000001e-06, "loss": 1.6698, "step": 72},
    {"epoch": 0.146, "grad_norm": 14.652379035949707, "learning_rate": 4.866666666666667e-06, "loss": 1.606, "step": 73},
    {"epoch": 0.148, "grad_norm": 22.868322372436523, "learning_rate": 4.933333333333334e-06, "loss": 1.6168, "step": 74},
    {"epoch": 0.15, "grad_norm": 14.871725082397461, "learning_rate": 5e-06, "loss": 1.6506, "step": 75},
    {"epoch": 0.152, "grad_norm": 21.552318572998047, "learning_rate": 5.0666666666666676e-06, "loss": 1.6366, "step": 76},
    {"epoch": 0.154, "grad_norm": 15.857409477233887, "learning_rate": 5.133333333333334e-06, "loss": 1.6794, "step": 77},
    {"epoch": 0.156, "grad_norm": 17.261104583740234, "learning_rate": 5.2e-06, "loss": 1.6165, "step": 78},
    {"epoch": 0.158, "grad_norm": 14.66672420501709, "learning_rate": 5.2666666666666665e-06, "loss": 1.6562, "step": 79},
    {"epoch": 0.16, "grad_norm": 22.905376434326172, "learning_rate": 5.333333333333334e-06, "loss": 1.6468, "step": 80},
    {"epoch": 0.162, "grad_norm": 17.427703857421875, "learning_rate": 5.400000000000001e-06, "loss": 1.6618, "step": 81},
    {"epoch": 0.164, "grad_norm": 15.554149627685547, "learning_rate": 5.466666666666667e-06, "loss": 1.6368, "step": 82},
    {"epoch": 0.166, "grad_norm": 16.746028900146484, "learning_rate": 5.533333333333334e-06, "loss": 1.6583, "step": 83},
    {"epoch": 0.168, "grad_norm": 17.273025512695312, "learning_rate": 5.600000000000001e-06, "loss": 1.625, "step": 84},
    {"epoch": 0.17, "grad_norm": 20.172046661376953, "learning_rate": 5.666666666666667e-06, "loss": 1.6733, "step": 85},
    {"epoch": 0.172, "grad_norm": 13.854218482971191, "learning_rate": 5.733333333333334e-06, "loss": 1.649, "step": 86},
    {"epoch": 0.174, "grad_norm": 28.214906692504883, "learning_rate": 5.8e-06, "loss": 1.5396, "step": 87},
    {"epoch": 0.176, "grad_norm": 17.772258758544922, "learning_rate": 5.8666666666666675e-06, "loss": 1.5781, "step": 88},
    {"epoch": 0.178, "grad_norm": 14.36558723449707, "learning_rate": 5.933333333333335e-06, "loss": 1.6857, "step": 89},
    {"epoch": 0.18, "grad_norm": 15.606388092041016, "learning_rate": 6e-06, "loss": 1.6192, "step": 90},
    {"epoch": 0.182, "grad_norm": 16.106149673461914, "learning_rate": 6.066666666666667e-06, "loss": 1.568, "step": 91},
    {"epoch": 0.184, "grad_norm": 11.074296951293945, "learning_rate": 6.133333333333334e-06, "loss": 1.631, "step": 92},
    {"epoch": 0.186, "grad_norm": 15.625429153442383, "learning_rate": 6.200000000000001e-06, "loss": 1.6064, "step": 93},
    {"epoch": 0.188, "grad_norm": 15.9578218460083, "learning_rate": 6.266666666666668e-06, "loss": 1.6646, "step": 94},
    {"epoch": 0.19, "grad_norm": 19.56605339050293, "learning_rate": 6.333333333333333e-06, "loss": 1.5587, "step": 95},
    {"epoch": 0.192, "grad_norm": 14.848856925964355, "learning_rate": 6.4000000000000006e-06, "loss": 1.6036, "step": 96},
    {"epoch": 0.194, "grad_norm": 22.246797561645508, "learning_rate": 6.466666666666667e-06, "loss": 1.6709, "step": 97},
    {"epoch": 0.196, "grad_norm": 18.220890045166016, "learning_rate": 6.533333333333334e-06, "loss": 1.6603, "step": 98},
    {"epoch": 0.198, "grad_norm": 18.9663028717041, "learning_rate": 6.600000000000001e-06, "loss": 1.7576, "step": 99},
    {"epoch": 0.2, "grad_norm": 22.304872512817383, "learning_rate": 6.666666666666667e-06, "loss": 1.7278, "step": 100},
    {"epoch": 0.202, "grad_norm": 22.000619888305664, "learning_rate": 6.733333333333334e-06, "loss": 1.7162, "step": 101},
    {"epoch": 0.204, "grad_norm": 19.02189064025879, "learning_rate": 6.800000000000001e-06, "loss": 1.6294, "step": 102},
    {"epoch": 0.206, "grad_norm": 17.57056999206543, "learning_rate": 6.866666666666667e-06, "loss": 1.651, "step": 103},
    {"epoch": 0.208, "grad_norm": 20.44572639465332, "learning_rate": 6.9333333333333344e-06, "loss": 1.6798, "step": 104},
    {"epoch": 0.21, "grad_norm": 21.554231643676758, "learning_rate": 7e-06, "loss": 1.676, "step": 105},
    {"epoch": 0.212, "grad_norm": 18.977502822875977, "learning_rate": 7.066666666666667e-06, "loss": 1.6736, "step": 106},
    {"epoch": 0.214, "grad_norm": 18.957950592041016, "learning_rate": 7.133333333333334e-06, "loss": 1.7151, "step": 107},
    {"epoch": 0.216, "grad_norm": 19.2664852142334, "learning_rate": 7.2000000000000005e-06, "loss": 1.6875, "step": 108},
    {"epoch": 0.218, "grad_norm": 18.646236419677734, "learning_rate": 7.266666666666668e-06, "loss": 1.6509, "step": 109},
    {"epoch": 0.22, "grad_norm": 21.31477928161621, "learning_rate": 7.333333333333333e-06, "loss": 1.597, "step": 110},
    {"epoch": 0.222, "grad_norm": 17.566238403320312, "learning_rate": 7.4e-06, "loss": 1.7574, "step": 111},
    {"epoch": 0.224, "grad_norm": 13.890412330627441, "learning_rate": 7.4666666666666675e-06, "loss": 1.5845, "step": 112},
    {"epoch": 0.226, "grad_norm": 16.55223846435547, "learning_rate": 7.533333333333334e-06, "loss": 1.5888, "step": 113},
    {"epoch": 0.228, "grad_norm": 26.499820709228516, "learning_rate": 7.600000000000001e-06, "loss": 1.584, "step": 114},
    {"epoch": 0.23, "grad_norm": 31.17205810546875, "learning_rate": 7.666666666666667e-06, "loss": 1.6443, "step": 115},
    {"epoch": 0.232, "grad_norm": 31.60078239440918, "learning_rate": 7.733333333333334e-06, "loss": 1.7377, "step": 116},
    {"epoch": 0.234, "grad_norm": 27.893613815307617, "learning_rate": 7.800000000000002e-06, "loss": 1.779, "step": 117},
    {"epoch": 0.236, "grad_norm": 16.482492446899414, "learning_rate": 7.866666666666667e-06, "loss": 1.7002, "step": 118},
    {"epoch": 0.238, "grad_norm": 24.259340286254883, "learning_rate": 7.933333333333334e-06, "loss": 1.5989, "step": 119},
    {"epoch": 0.24, "grad_norm": 18.561723709106445, "learning_rate": 8.000000000000001e-06, "loss": 1.7336, "step": 120},
    {"epoch": 0.242, "grad_norm": 15.461645126342773, "learning_rate": 8.066666666666667e-06, "loss": 1.7121, "step": 121},
    {"epoch": 0.244, "grad_norm": 22.555328369140625, "learning_rate": 8.133333333333334e-06, "loss": 1.7554, "step": 122},
    {"epoch": 0.246, "grad_norm": 14.69766902923584, "learning_rate": 8.2e-06, "loss": 1.6397, "step": 123},
    {"epoch": 0.248, "grad_norm": 15.5711030960083, "learning_rate": 8.266666666666667e-06, "loss": 1.7016, "step": 124},
    {"epoch": 0.25, "grad_norm": 10.80448055267334, "learning_rate": 8.333333333333334e-06, "loss": 1.6544, "step": 125},
    {"epoch": 0.252, "grad_norm": 18.54620361328125, "learning_rate": 8.400000000000001e-06, "loss": 1.623, "step": 126},
    {"epoch": 0.254, "grad_norm": 34.518768310546875, "learning_rate": 8.466666666666668e-06, "loss": 1.6958, "step": 127},
    {"epoch": 0.256, "grad_norm": 21.02113151550293, "learning_rate": 8.533333333333335e-06, "loss": 1.7662, "step": 128},
    {"epoch": 0.258, "grad_norm": 14.756684303283691, "learning_rate": 8.6e-06, "loss": 1.5814, "step": 129},
    {"epoch": 0.26, "grad_norm": 16.498056411743164, "learning_rate": 8.666666666666668e-06, "loss": 1.7248, "step": 130},
    {"epoch": 0.262, "grad_norm": 19.80323028564453, "learning_rate": 8.733333333333333e-06, "loss": 1.7451, "step": 131},
    {"epoch": 0.264, "grad_norm": 19.826833724975586, "learning_rate": 8.8e-06, "loss": 1.6475, "step": 132},
    {"epoch": 0.266, "grad_norm": 15.338292121887207, "learning_rate": 8.866666666666668e-06, "loss": 1.623, "step": 133},
    {"epoch": 0.268, "grad_norm": 12.683643341064453, "learning_rate": 8.933333333333333e-06, "loss": 1.6932, "step": 134},
    {"epoch": 0.27, "grad_norm": 15.072694778442383, "learning_rate": 9e-06, "loss": 1.6978, "step": 135},
    {"epoch": 0.272, "grad_norm": 17.20955467224121, "learning_rate": 9.066666666666667e-06, "loss": 1.6464, "step": 136},
    {"epoch": 0.274, "grad_norm": 16.128379821777344, "learning_rate": 9.133333333333335e-06, "loss": 1.6678, "step": 137},
    {"epoch": 0.276, "grad_norm": 15.234736442565918, "learning_rate": 9.200000000000002e-06, "loss": 1.4748, "step": 138},
    {"epoch": 0.278, "grad_norm": 12.608988761901855, "learning_rate": 9.266666666666667e-06, "loss": 1.6285, "step": 139},
    {"epoch": 0.28, "grad_norm": 20.843536376953125, "learning_rate": 9.333333333333334e-06, "loss": 1.699, "step": 140},
    {"epoch": 0.282, "grad_norm": 18.524633407592773, "learning_rate": 9.4e-06, "loss": 1.6608, "step": 141},
    {"epoch": 0.284, "grad_norm": 13.68975830078125, "learning_rate": 9.466666666666667e-06, "loss": 1.6153, "step": 142},
    {"epoch": 0.286, "grad_norm": 14.197174072265625, "learning_rate": 9.533333333333334e-06, "loss": 1.7042, "step": 143},
    {"epoch": 0.288, "grad_norm": 18.283809661865234, "learning_rate": 9.600000000000001e-06, "loss": 1.7929, "step": 144},
    {"epoch": 0.29, "grad_norm": 14.084982872009277, "learning_rate": 9.666666666666667e-06, "loss": 1.7448, "step": 145},
    {"epoch": 0.292, "grad_norm": 15.236905097961426, "learning_rate": 9.733333333333334e-06, "loss": 1.7063, "step": 146},
    {"epoch": 0.294, "grad_norm": 14.99785327911377, "learning_rate": 9.800000000000001e-06, "loss": 1.6991, "step": 147},
    {"epoch": 0.296, "grad_norm": 18.394018173217773, "learning_rate": 9.866666666666668e-06, "loss": 1.5488, "step": 148},
    {"epoch": 0.298, "grad_norm": 11.244776725769043, "learning_rate": 9.933333333333334e-06, "loss": 1.6293, "step": 149},
    {"epoch": 0.3, "grad_norm": 20.61504364013672, "learning_rate": 1e-05, "loss": 1.703, "step": 150},
    {"epoch": 0.302, "grad_norm": 20.268144607543945, "learning_rate": 9.99998646145412e-06, "loss": 1.8211, "step": 151},
    {"epoch": 0.304, "grad_norm": 17.08184242248535, "learning_rate": 9.999945845889795e-06, "loss": 1.5821, "step": 152},
    {"epoch": 0.306, "grad_norm": 18.60841178894043, "learning_rate": 9.999878153526974e-06, "loss": 1.7683, "step": 153},
    {"epoch": 0.308, "grad_norm": 11.097747802734375, "learning_rate": 9.999783384732242e-06, "loss": 1.6726, "step": 154},
    {"epoch": 0.31, "grad_norm": 24.11589241027832, "learning_rate": 9.999661540018812e-06, "loss": 1.7708, "step": 155},
    {"epoch": 0.312, "grad_norm": 14.22172737121582, "learning_rate": 9.999512620046523e-06, "loss": 1.6218, "step": 156},
    {"epoch": 0.314, "grad_norm": 15.114810943603516, "learning_rate": 9.999336625621836e-06, "loss": 1.7024, "step": 157},
    {"epoch": 0.316, "grad_norm": 11.347399711608887, "learning_rate": 9.99913355769784e-06, "loss": 1.7575, "step": 158},
    {"epoch": 0.318, "grad_norm": 13.826147079467773, "learning_rate": 9.998903417374228e-06, "loss": 1.727, "step": 159},
    {"epoch": 0.32, "grad_norm": 14.352263450622559, "learning_rate": 9.99864620589731e-06, "loss": 1.5935, "step": 160},
    {"epoch": 0.322, "grad_norm": 12.488923072814941, "learning_rate": 9.998361924659989e-06, "loss": 1.5656, "step": 161},
    {"epoch": 0.324, "grad_norm": 18.001474380493164, "learning_rate": 9.998050575201772e-06, "loss": 1.6436, "step": 162},
    {"epoch": 0.326, "grad_norm": 16.588930130004883, "learning_rate": 9.997712159208745e-06, "loss": 1.6952, "step": 163},
    {"epoch": 0.328, "grad_norm": 15.841776847839355, "learning_rate": 9.99734667851357e-06, "loss": 1.6058, "step": 164},
    {"epoch": 0.33, "grad_norm": 17.352310180664062, "learning_rate": 9.99695413509548e-06, "loss": 1.6962, "step": 165},
    {"epoch": 0.332, "grad_norm": 12.201016426086426, "learning_rate": 9.99653453108026e-06, "loss": 1.5996, "step": 166},
    {"epoch": 0.334, "grad_norm": 20.2280216217041, "learning_rate": 9.996087868740244e-06, "loss": 1.7222, "step": 167},
    {"epoch": 0.336, "grad_norm": 15.186982154846191, "learning_rate": 9.995614150494293e-06, "loss": 1.7376, "step": 168},
    {"epoch": 0.338, "grad_norm": 17.792768478393555, "learning_rate": 9.995113378907791e-06, "loss": 1.6865, "step": 169},
    {"epoch": 0.34, "grad_norm": 12.01579761505127, "learning_rate": 9.994585556692624e-06, "loss": 1.6595, "step": 170},
    {"epoch": 0.342, "grad_norm": 16.191465377807617, "learning_rate": 9.994030686707171e-06, "loss": 1.5984, "step": 171},
    {"epoch": 0.344, "grad_norm": 15.65661334991455, "learning_rate": 9.993448771956285e-06, "loss": 1.6659, "step": 172},
    {"epoch": 0.346, "grad_norm": 673.6796264648438, "learning_rate": 9.99283981559128e-06, "loss": 2.1504, "step": 173},
    {"epoch": 0.348, "grad_norm": 11.247122764587402, "learning_rate": 9.992203820909906e-06, "loss": 1.7253, "step": 174},
    {"epoch": 0.35, "grad_norm": 17.28847312927246, "learning_rate": 9.991540791356342e-06, "loss": 1.7098, "step": 175},
    {"epoch": 0.352, "grad_norm": 13.881318092346191, "learning_rate": 9.99085073052117e-06, "loss": 1.5808, "step": 176},
    {"epoch": 0.354, "grad_norm": 14.093522071838379, "learning_rate": 9.990133642141359e-06, "loss": 1.613, "step": 177},
    {"epoch": 0.356, "grad_norm": 10.097461700439453, "learning_rate": 9.989389530100242e-06, "loss": 1.7024, "step": 178},
    {"epoch": 0.358, "grad_norm": 12.82798957824707, "learning_rate": 9.988618398427495e-06, "loss": 1.6558, "step": 179},
    {"epoch": 0.36, "grad_norm": 12.32880687713623, "learning_rate": 9.987820251299121e-06, "loss": 1.4778, "step": 180},
    {"epoch": 0.362, "grad_norm": 8.794655799865723, "learning_rate": 9.986995093037422e-06, "loss": 1.6181, "step": 181},
    {"epoch": 0.364, "grad_norm": 11.556446075439453, "learning_rate": 9.986142928110972e-06, "loss": 1.7423, "step": 182},
    {"epoch": 0.366, "grad_norm": 23.587114334106445, "learning_rate": 9.985263761134602e-06, "loss": 1.7441, "step": 183},
    {"epoch": 0.368, "grad_norm": 14.703253746032715, "learning_rate": 9.984357596869369e-06, "loss": 1.6989, "step": 184},
    {"epoch": 0.37, "grad_norm": 11.022418975830078, "learning_rate": 9.98342444022253e-06, "loss": 1.6443, "step": 185},
    {"epoch": 0.372, "grad_norm": 16.842967987060547, "learning_rate": 9.982464296247523e-06, "loss": 1.7194, "step": 186},
    {"epoch": 0.374, "grad_norm": 14.422867774963379, "learning_rate": 9.981477170143924e-06, "loss": 1.6715, "step": 187},
    {"epoch": 0.376, "grad_norm": 15.538463592529297, "learning_rate": 9.980463067257437e-06, "loss": 1.6339, "step": 188},
    {"epoch": 0.378, "grad_norm": 10.891705513000488, "learning_rate": 9.979421993079853e-06, "loss": 1.6721, "step": 189},
    {"epoch": 0.38, "grad_norm": 12.947959899902344, "learning_rate": 9.978353953249023e-06, "loss": 1.7966, "step": 190},
    {"epoch": 0.382, "grad_norm": 9.348876953125, "learning_rate": 9.977258953548831e-06, "loss": 1.6517, "step": 191},
    {"epoch": 0.384, "grad_norm": 8.993280410766602, "learning_rate": 9.976136999909156e-06, "loss": 1.6427, "step": 192},
    {"epoch": 0.386, "grad_norm": 8.9512300491333, "learning_rate": 9.97498809840585e-06, "loss": 1.6192, "step": 193},
    {"epoch": 0.388, "grad_norm": 9.874775886535645, "learning_rate": 9.973812255260692e-06, "loss": 1.6766, "step": 194},
    {"epoch": 0.39, "grad_norm": 11.946427345275879, "learning_rate": 9.972609476841368e-06, "loss": 1.6853, "step": 195},
    {"epoch": 0.392, "grad_norm": 11.508259773254395, "learning_rate": 9.971379769661422e-06, "loss": 1.6911, "step": 196},
    {"epoch": 0.394, "grad_norm": 10.715835571289062, "learning_rate": 9.970123140380237e-06, "loss": 1.6015, "step": 197},
    {"epoch": 0.396, "grad_norm": 11.727666854858398, "learning_rate": 9.968839595802982e-06, "loss": 1.6955, "step": 198},
    {"epoch": 0.398, "grad_norm": 10.18913745880127, "learning_rate": 9.967529142880592e-06, "loss": 1.5734, "step": 199},
    {"epoch": 0.4, "grad_norm": 10.491742134094238, "learning_rate": 9.966191788709716e-06, "loss": 1.659, "step": 200},
    {"epoch": 0.402, "grad_norm": 8.228020668029785, "learning_rate": 9.964827540532685e-06, "loss": 1.5536, "step": 201},
    {"epoch": 0.404, "grad_norm": 15.777739524841309, "learning_rate": 9.963436405737476e-06, "loss": 1.536, "step": 202},
    {"epoch": 0.406, "grad_norm": 11.057550430297852, "learning_rate": 9.962018391857665e-06, "loss": 1.583, "step": 203},
    {"epoch": 0.408, "grad_norm": 11.679120063781738, "learning_rate": 9.960573506572391e-06, "loss": 1.5931, "step": 204},
    {"epoch": 0.41, "grad_norm": 18.90032386779785, "learning_rate": 9.959101757706308e-06, "loss": 1.7411, "step": 205},
    {"epoch": 0.412, "grad_norm": 18.84972381591797, "learning_rate": 9.957603153229559e-06, "loss": 1.6154, "step": 206},
    {"epoch": 0.414, "grad_norm": 13.331605911254883, "learning_rate": 9.95607770125771e-06, "loss": 1.677, "step": 207},
    {"epoch": 0.416, "grad_norm": 13.559438705444336, "learning_rate": 9.95452541005172e-06, "loss": 1.6305, "step": 208},
    {"epoch": 0.418, "grad_norm": 14.361797332763672, "learning_rate": 9.952946288017899e-06, "loss": 1.6408, "step": 209},
    {"epoch": 0.42, "grad_norm": 16.02930450439453, "learning_rate": 9.951340343707852e-06, "loss": 1.7221, "step": 210},
    {"epoch": 0.422, "grad_norm": 15.836488723754883, "learning_rate": 9.94970758581844e-06, "loss": 1.5559, "step": 211},
    {"epoch": 0.424, "grad_norm": 13.252622604370117, "learning_rate": 9.948048023191728e-06, "loss": 1.6659, "step": 212},
    {"epoch": 0.426, "grad_norm": 10.738286972045898, "learning_rate": 9.946361664814942e-06, "loss": 1.5846, "step": 213},
    {"epoch": 0.428, "grad_norm": 12.482677459716797, "learning_rate": 9.94464851982042e-06, "loss": 1.7871, "step": 214},
    {"epoch": 0.43, "grad_norm": 10.500411987304688, "learning_rate": 9.942908597485558e-06, "loss": 1.7273, "step": 215},
    {"epoch": 0.432, "grad_norm": 10.43774700164795, "learning_rate": 9.941141907232766e-06, "loss": 1.7749, "step": 216},
    {"epoch": 0.434, "grad_norm": 10.52614688873291, "learning_rate": 9.939348458629406e-06, "loss": 1.6396, "step": 217},
    {"epoch": 0.436, "grad_norm": 9.133844375610352, "learning_rate": 9.937528261387753e-06, "loss": 1.6775, "step": 218},
    {"epoch": 0.438, "grad_norm": 10.179513931274414, "learning_rate": 9.93568132536494e-06, "loss": 1.6362, "step": 219},
    {"epoch": 0.44, "grad_norm": 10.565604209899902, "learning_rate": 9.933807660562898e-06, "loss": 1.6125, "step": 220},
    {"epoch": 0.442, "grad_norm": 13.952414512634277, "learning_rate": 9.9319072771283e-06, "loss": 1.6762, "step": 221},
    {"epoch": 0.444, "grad_norm": 13.159649848937988, "learning_rate": 9.929980185352525e-06, "loss": 1.7111, "step": 222},
    {"epoch": 0.446, "grad_norm": 15.363605499267578, "learning_rate": 9.928026395671577e-06, "loss": 1.687, "step": 223},
    {"epoch": 0.448, "grad_norm": 12.805254936218262, "learning_rate": 9.926045918666045e-06, "loss": 1.6426, "step": 224},
    {"epoch": 0.45, "grad_norm": 9.896926879882812, "learning_rate": 9.924038765061042e-06, "loss": 1.657, "step": 225},
    {"epoch": 0.452, "grad_norm": 8.098036766052246, "learning_rate": 9.92200494572614e-06, "loss": 1.5689, "step": 226},
    {"epoch": 0.454, "grad_norm": 9.297266960144043, "learning_rate": 9.919944471675328e-06, "loss": 1.5641, "step": 227},
    {"epoch": 0.456, "grad_norm": 11.86732292175293, "learning_rate": 9.91785735406693e-06, "loss": 1.5646, "step": 228},
    {"epoch": 0.458, "grad_norm": 15.542577743530273, "learning_rate": 9.915743604203563e-06, "loss": 1.7098, "step": 229},
    {"epoch": 0.46, "grad_norm": 15.652758598327637, "learning_rate": 9.913603233532067e-06, "loss": 1.6008, "step": 230},
    {"epoch": 0.462, "grad_norm": 14.435830116271973, "learning_rate": 9.911436253643445e-06, "loss": 1.6086, "step": 231},
    {"epoch": 0.464, "grad_norm": 12.681479454040527, "learning_rate": 9.909242676272797e-06, "loss": 1.7047, "step": 232},
    {"epoch": 0.466, "grad_norm": 13.065205574035645, "learning_rate": 9.907022513299264e-06, "loss": 1.6552, "step": 233},
    {"epoch": 0.468, "grad_norm": 10.368819236755371, "learning_rate": 9.904775776745959e-06, "loss": 1.6101, "step": 234},
    {"epoch": 0.47, "grad_norm": 17.001413345336914, "learning_rate": 9.902502478779897e-06, "loss": 1.7077, "step": 235},
    {"epoch": 0.472, "grad_norm": 9.429941177368164, "learning_rate": 9.90020263171194e-06, "loss": 1.5966, "step": 236},
    {"epoch": 0.474, "grad_norm": 9.692448616027832, "learning_rate": 9.89787624799672e-06, "loss": 1.6065, "step": 237},
    {"epoch": 0.476, "grad_norm": 12.412395477294922, "learning_rate": 9.89552334023258e-06, "loss": 1.8045, "step": 238},
    {"epoch": 0.478, "grad_norm": 10.290336608886719, "learning_rate": 9.893143921161501e-06, "loss": 1.5883, "step": 239},
    {"epoch": 0.48, "grad_norm": 16.823965072631836, "learning_rate": 9.890738003669029e-06, "loss": 1.6098, "step": 240},
    {"epoch": 0.482, "grad_norm": 10.46012020111084, "learning_rate": 9.888305600784217e-06, "loss": 1.6483, "step": 241},
    {"epoch": 0.484, "grad_norm": 9.59673023223877, "learning_rate": 9.88584672567954e-06, "loss": 1.5618, "step": 242},
    {"epoch": 0.486, "grad_norm": 11.56871223449707, "learning_rate": 9.883361391670841e-06, "loss": 1.698, "step": 243},
    {"epoch": 0.488, "grad_norm": 6.657703399658203, "learning_rate": 9.880849612217238e-06, "loss": 1.689, "step": 244},
    {"epoch": 0.49, "grad_norm": 11.670310020446777, "learning_rate": 9.878311400921072e-06, "loss": 1.694, "step": 245},
    {"epoch": 0.492, "grad_norm": 9.138250350952148, "learning_rate": 9.875746771527817e-06, "loss": 1.6979, "step": 246},
    {"epoch": 0.494, "grad_norm": 10.32927131652832, "learning_rate": 9.873155737926014e-06, "loss": 1.6198, "step": 247},
    {"epoch": 0.496, "grad_norm": 8.178282737731934, "learning_rate": 9.870538314147194e-06, "loss": 1.6388, "step": 248},
    {"epoch": 0.498, "grad_norm": 10.270970344543457, "learning_rate": 9.867894514365802e-06, "loss": 1.5972, "step": 249},
    {"epoch": 0.5, "grad_norm": 8.778549194335938, "learning_rate": 9.86522435289912e-06, "loss": 1.7337, "step": 250},
    {"epoch": 0.502, "grad_norm": 7.87909460067749, "learning_rate": 9.862527844207189e-06, "loss": 1.6102, "step": 251},
    {"epoch": 0.504, "grad_norm": 9.182847023010254, "learning_rate": 9.859805002892733e-06, "loss": 1.7548, "step": 252},
    {"epoch": 0.506, "grad_norm": 7.970770835876465, "learning_rate": 9.857055843701073e-06, "loss": 1.6482, "step": 253},
    {"epoch": 0.508, "grad_norm": 13.17197322845459, "learning_rate": 9.85428038152006e-06, "loss": 1.663, "step": 254},
    {"epoch": 0.51, "grad_norm": 9.727519035339355, "learning_rate": 9.851478631379982e-06, "loss": 1.613, "step": 255},
    {"epoch": 0.512, "grad_norm": 11.023371696472168, "learning_rate": 9.84865060845349e-06, "loss": 1.6383, "step": 256},
    {"epoch": 0.514, "grad_norm": 11.075227737426758, "learning_rate": 9.845796328055505e-06, "loss": 1.6506, "step": 257},
    {"epoch": 0.516, "grad_norm": 10.624147415161133, "learning_rate": 9.842915805643156e-06, "loss": 1.6063, "step": 258},
    {"epoch": 0.518, "grad_norm": 8.243653297424316, "learning_rate": 9.840009056815674e-06, "loss": 1.6393, "step": 259},
    {"epoch": 0.52, "grad_norm": 9.168231964111328, "learning_rate": 9.83707609731432e-06, "loss": 1.5481, "step": 260},
    {"epoch": 0.522, "grad_norm": 10.238672256469727, "learning_rate": 9.834116943022299e-06, "loss": 1.703, "step": 261},
    {"epoch": 0.524, "grad_norm": 6.614677906036377, "learning_rate": 9.831131609964664e-06, "loss": 1.5414, "step": 262},
    {"epoch": 0.526, "grad_norm": 7.963540554046631, "learning_rate": 9.828120114308248e-06, "loss": 1.603, "step": 263},
    {"epoch": 0.528, "grad_norm": 11.000412940979004, "learning_rate": 9.825082472361558e-06, "loss": 1.7098, "step": 264},
    {"epoch": 0.53, "grad_norm": 10.786978721618652, "learning_rate": 9.822018700574696e-06, "loss": 1.6766, "step": 265},
    {"epoch": 0.532, "grad_norm": 9.823018074035645, "learning_rate": 9.818928815539266e-06, "loss": 1.6362, "step": 266},
    {"epoch": 0.534, "grad_norm": 10.396644592285156, "learning_rate": 9.815812833988292e-06, "loss": 1.8225, "step": 267},
    {"epoch": 0.536, "grad_norm": 11.278525352478027, "learning_rate": 9.812670772796113e-06, "loss": 1.7868, "step": 268},
    {"epoch": 0.538, "grad_norm": 8.690960884094238, "learning_rate": 9.809502648978311e-06, "loss": 1.567, "step": 269},
    {"epoch": 0.54, "grad_norm": 11.479389190673828, "learning_rate": 9.806308479691595e-06, "loss": 1.6175, "step": 270},
    {"epoch": 0.542, "grad_norm": 8.428929328918457, "learning_rate": 9.803088282233733e-06, "loss": 1.6575, "step": 271},
    {"epoch": 0.544, "grad_norm": 8.638998985290527, "learning_rate": 9.799842074043438e-06, "loss": 1.6002, "step": 272},
    {"epoch": 0.546, "grad_norm": 9.341121673583984, "learning_rate": 9.796569872700287e-06, "loss": 1.672, "step": 273},
    {"epoch": 0.548, "grad_norm": 9.511893272399902, "learning_rate": 9.793271695924621e-06, "loss": 1.7315, "step": 274},
    {"epoch": 0.55, "grad_norm": 12.620855331420898, "learning_rate": 9.789947561577445e-06, "loss": 1.63, "step": 275},
    {"epoch": 0.552, "grad_norm": 14.644619941711426, "learning_rate": 9.786597487660336e-06, "loss": 1.649, "step": 276},
    {"epoch": 0.554, "grad_norm": 8.164085388183594, "learning_rate": 9.78322149231535e-06, "loss": 1.6249, "step": 277},
    {"epoch": 0.556, "grad_norm": 12.925362586975098, "learning_rate": 9.779819593824909e-06, "loss": 1.572, "step": 278},
    {"epoch": 0.558, "grad_norm": 10.382940292358398, "learning_rate": 9.776391810611719e-06, "loss": 1.6225, "step": 279},
    {"epoch": 0.56, "grad_norm": 14.114727973937988, "learning_rate": 9.77293816123866e-06, "loss": 1.4772, "step": 280},
    {"epoch": 0.562, "grad_norm": 9.955523490905762, "learning_rate": 9.769458664408689e-06, "loss": 1.6424, "step": 281},
    {"epoch": 0.564, "grad_norm": 10.805880546569824, "learning_rate": 9.765953338964736e-06, "loss": 1.5621, "step": 282},
    {"epoch": 0.566, "grad_norm": 9.07789134979248, "learning_rate": 9.762422203889604e-06, "loss": 1.6892, "step": 283},
    {"epoch": 0.568, "grad_norm": 13.484134674072266, "learning_rate": 9.75886527830587e-06, "loss": 1.7468, "step": 284},
    {"epoch": 0.57, "grad_norm": 10.758011817932129, "learning_rate": 9.755282581475769e-06, "loss": 1.699, "step": 285},
    {"epoch": 0.572, "grad_norm": 8.382722854614258, "learning_rate": 9.751674132801106e-06, "loss": 1.5586, "step": 286},
    {"epoch": 0.574, "grad_norm": 7.959819316864014, "learning_rate": 9.748039951823141e-06, "loss": 1.5625, "step": 287},
    {"epoch": 0.576, "grad_norm": 8.60888671875, "learning_rate": 9.744380058222483e-06, "loss": 1.5166, "step": 288},
    {"epoch": 0.578, "grad_norm": 10.173426628112793, "learning_rate": 9.740694471818988e-06, "loss": 1.7414, "step": 289},
    {"epoch": 0.58, "grad_norm": 8.694028854370117, "learning_rate": 9.736983212571646e-06, "loss": 1.5719, "step": 290},
    {"epoch": 0.582, "grad_norm": 12.50915241241455, "learning_rate": 9.733246300578482e-06, "loss": 1.5379, "step": 291},
    {"epoch": 0.584, "grad_norm": 7.476858615875244, "learning_rate": 9.729483756076436e-06, "loss": 1.5798, "step": 292},
    {"epoch": 0.586, "grad_norm": 8.575016975402832, "learning_rate": 9.72569559944126e-06, "loss": 1.6348, "step": 293},
    {"epoch": 0.588, "grad_norm": 10.078662872314453, "learning_rate": 9.721881851187406e-06, "loss": 1.6831, "step": 294},
    {"epoch": 0.59, "grad_norm": 12.232658386230469, "learning_rate": 9.718042531967918e-06, "loss": 1.687, "step": 295},
    {"epoch": 0.592, "grad_norm": 10.713579177856445, "learning_rate": 9.714177662574316e-06, "loss": 1.6168, "step": 296},
    {"epoch": 0.594, "grad_norm": 10.275739669799805, "learning_rate": 9.710287263936485e-06, "loss": 1.7538, "step": 297},
    {"epoch": 0.596, "grad_norm": 7.381926536560059, "learning_rate": 9.70637135712256e-06, "loss": 1.5313, "step": 298},
    {"epoch": 0.598, "grad_norm": 10.051726341247559, "learning_rate": 9.702429963338812e-06, "loss": 1.7786, "step": 299},
    {"epoch": 0.6, "grad_norm": 10.447134017944336, "learning_rate": 9.698463103929542e-06, "loss": 1.6418, "step": 300},
    {"epoch": 0.602, "grad_norm": 13.121293067932129, "learning_rate": 9.694470800376951e-06, "loss": 1.7257, "step": 301},
    {"epoch": 0.604, "grad_norm": 8.965837478637695, "learning_rate": 9.690453074301035e-06, "loss": 1.6921, "step": 302},
    {"epoch": 0.606, "grad_norm": 13.510847091674805, "learning_rate": 9.68640994745946e-06, "loss": 1.7176, "step": 303},
    {"epoch": 0.608, "grad_norm": 10.887725830078125, "learning_rate": 9.682341441747446e-06, "loss": 1.7061, "step": 304},
    {"epoch": 0.61, "grad_norm": 8.40693473815918, "learning_rate": 9.678247579197658e-06, "loss": 1.6644, "step": 305},
    {"epoch": 0.612, "grad_norm": 11.772154808044434, "learning_rate": 9.674128381980073e-06, "loss": 1.7006, "step": 306},
    {"epoch": 0.614, "grad_norm": 10.446290969848633, "learning_rate": 9.669983872401868e-06, "loss": 1.7206, "step": 307},
    {"epoch": 0.616, "grad_norm": 10.989177703857422, "learning_rate": 9.665814072907293e-06, "loss": 1.6525, "step": 308},
    {"epoch": 0.618, "grad_norm": 11.617728233337402, "learning_rate": 9.661619006077562e-06, "loss": 1.6366, "step": 309},
    {"epoch": 0.62, "grad_norm": 12.805769920349121, "learning_rate": 9.657398694630713e-06, "loss": 1.7126, "step": 310},
    {"epoch": 0.622, "grad_norm": 10.547633171081543, "learning_rate": 9.653153161421497e-06, "loss": 1.6447, "step": 311},
    {"epoch": 0.624, "grad_norm": 11.312098503112793, "learning_rate": 9.648882429441258e-06, "loss": 1.732, "step": 312},
    {"epoch": 0.626, "grad_norm": 11.163432121276855, "learning_rate": 9.644586521817792e-06, "loss": 1.5886, "step": 313},
    {"epoch": 0.628, "grad_norm": 14.074267387390137, "learning_rate": 9.640265461815235e-06, "loss": 1.6658, "step": 314},
    {"epoch": 0.63, "grad_norm": 8.643535614013672, "learning_rate": 9.635919272833938e-06, "loss": 1.5441, "step": 315},
    {"epoch": 0.632, "grad_norm": 17.20092010498047, "learning_rate": 9.63154797841033e-06, "loss": 1.6208, "step": 316},
    {"epoch": 0.634, "grad_norm": 9.415007591247559, "learning_rate": 9.627151602216801e-06, "loss": 1.5145, "step": 317},
    {"epoch": 0.636, "grad_norm": 14.709664344787598, "learning_rate": 9.622730168061568e-06, "loss": 1.7034, "step": 318},
    {"epoch": 0.638, "grad_norm": 13.328878402709961, "learning_rate": 9.618283699888543e-06, "loss": 1.7108, "step": 319},
    {"epoch": 0.64, "grad_norm": 9.980976104736328, "learning_rate": 9.613812221777212e-06, "loss": 1.6241, "step": 320},
    {"epoch": 0.642, "grad_norm": 11.059935569763184, "learning_rate": 9.609315757942504e-06, "loss": 1.7884, "step": 321},
    {"epoch": 0.644, "grad_norm": 8.424722671508789, "learning_rate": 9.604794332734647e-06, "loss": 1.5701, "step": 322},
    {"epoch": 0.646, "grad_norm": 13.05691146850586, "learning_rate": 9.600247970639053e-06, "loss": 1.6983, "step": 323},
    {"epoch": 0.648, "grad_norm": 10.937281608581543, "learning_rate": 9.595676696276173e-06, "loss": 1.7072, "step": 324},
    {"epoch": 0.65, "grad_norm": 10.963797569274902, "learning_rate": 9.591080534401371e-06, "loss": 1.7535, "step": 325},
    {"epoch": 0.652, "grad_norm": 11.561660766601562, "learning_rate": 9.586459509904786e-06, "loss": 1.564, "step": 326},
    {"epoch": 0.654, "grad_norm": 9.038650512695312, "learning_rate": 9.581813647811199e-06, "loss": 1.7599, "step": 327},
    {"epoch": 0.656, "grad_norm": 9.181880950927734, "learning_rate": 9.577142973279896e-06, "loss": 1.5822, "step": 328},
    {"epoch": 0.658, "grad_norm": 9.09460735321045, "learning_rate": 9.572447511604536e-06, "loss": 1.6547, "step": 329},
    {"epoch": 0.66, "grad_norm": 8.515859603881836, "learning_rate": 9.567727288213005e-06, "loss": 1.59, "step": 330},
    {"epoch": 0.662, "grad_norm": 11.761797904968262, "learning_rate": 9.56298232866729e-06, "loss": 1.7411, "step": 331},
    {"epoch": 0.664, "grad_norm": 10.45743179321289, "learning_rate": 9.55821265866333e-06, "loss": 1.6905, "step": 332},
    {"epoch": 0.666, "grad_norm": 8.767599105834961, "learning_rate": 9.553418304030886e-06, "loss": 1.624, "step": 333},
    {"epoch": 0.668, "grad_norm": 8.311175346374512, "learning_rate": 9.548599290733393e-06, "loss": 1.7281, "step": 334},
    {"epoch": 0.67, "grad_norm": 10.42666244506836, "learning_rate": 9.543755644867823e-06, "loss": 1.6878, "step": 335},
    {"epoch": 0.672, "grad_norm": 7.798083782196045, "learning_rate": 9.538887392664544e-06, "loss": 1.623, "step": 336},
    {"epoch": 0.674, "grad_norm": 10.574175834655762, "learning_rate": 9.53399456048718e-06, "loss": 1.6386, "step": 337},
    {"epoch": 0.676, "grad_norm": 8.909551620483398, "learning_rate": 9.529077174832466e-06, "loss": 1.7324, "step": 338},
    {"epoch": 0.678, "grad_norm": 7.710360527038574, "learning_rate": 9.524135262330098e-06, "loss": 1.6373, "step": 339},
    {"epoch": 0.68, "grad_norm": 13.496674537658691, "learning_rate": 9.519168849742603e-06, "loss": 1.6202, "step": 340},
    {"epoch": 0.682, "grad_norm": 7.529291152954102, "learning_rate": 9.514177963965181e-06, "loss": 1.7156, "step": 341},
    {"epoch": 0.684, "grad_norm": 7.229175567626953, "learning_rate": 9.50916263202557e-06, "loss": 1.663, "step": 342},
    {"epoch": 0.686, "grad_norm": 12.523305892944336, "learning_rate": 9.504122881083886e-06, "loss": 1.6902, "step": 343},
    {"epoch": 0.688, "grad_norm": 10.700648307800293, "learning_rate": 9.499058738432492e-06, "loss": 1.6298, "step": 344},
    {"epoch": 0.69, "grad_norm": 7.542520523071289, "learning_rate": 9.493970231495836e-06, "loss": 1.5829, "step": 345},
    {"epoch": 0.692, "grad_norm": 9.304771423339844, "learning_rate": 9.488857387830315e-06, "loss": 1.6467, "step": 346},
    {"epoch": 0.694, "grad_norm": 11.418816566467285, "learning_rate": 9.483720235124113e-06, "loss": 1.7727, "step": 347},
    {"epoch": 0.696, "grad_norm": 7.398611068725586, "learning_rate": 9.478558801197065e-06, "loss": 1.5846, "step": 348},
    {"epoch": 0.698, "grad_norm": 10.512649536132812, "learning_rate": 9.473373114000493e-06, "loss": 1.7182, "step": 349},
    {"epoch": 0.7, "grad_norm": 9.7409029006958, "learning_rate": 9.468163201617063e-06, "loss": 1.6869, "step": 350},
    {"epoch": 0.702, "grad_norm": 11.419679641723633, "learning_rate": 9.46292909226063e-06, "loss": 1.6018, "step": 351},
    {"epoch": 0.704, "grad_norm": 12.171987533569336, "learning_rate": 9.457670814276083e-06, "loss": 1.5906, "step": 352},
    {"epoch": 0.706, "grad_norm": 10.171733856201172, "learning_rate": 9.452388396139202e-06, "loss": 1.6307, "step": 353},
    {"epoch": 0.708, "grad_norm": 11.288997650146484, "learning_rate": 9.44708186645649e-06, "loss": 1.627, "step": 354},
    {"epoch": 0.71, "grad_norm": 10.578997611999512, "learning_rate": 9.441751253965022e-06, "loss": 1.7024, "step": 355},
    {"epoch": 0.712, "grad_norm": 8.29975414276123, "learning_rate": 9.436396587532297e-06, "loss": 1.7009, "step": 356},
    {"epoch": 0.714, "grad_norm": 9.676756858825684, "learning_rate": 9.431017896156074e-06, "loss": 1.6421, "step": 357},
    {"epoch": 0.716, "grad_norm": 8.530579566955566, "learning_rate": 9.425615208964217e-06, "loss": 1.619, "step": 358},
    {"epoch": 0.718, "grad_norm": 6.746725082397461, "learning_rate": 9.420188555214537e-06, "loss": 1.5287, "step": 359},
    {"epoch": 0.72, "grad_norm": 7.753213405609131, "learning_rate": 9.414737964294636e-06, "loss": 1.53, "step": 360},
    {"epoch": 0.722, "grad_norm": 7.940869331359863, "learning_rate": 9.40926346572174e-06, "loss": 1.5859, "step": 361},
    {"epoch": 0.724, "grad_norm": 8.257153511047363, "learning_rate": 9.403765089142554e-06, "loss": 1.559, "step": 362},
    {"epoch": 0.726, "grad_norm": 8.980607032775879, "learning_rate": 9.398242864333084e-06, "loss": 1.6408, "step": 363},
    {"epoch": 0.728, "grad_norm": 12.678767204284668, "learning_rate": 9.392696821198488e-06, "loss": 1.6613, "step": 364},
    {"epoch": 0.73, "grad_norm": 9.08625316619873, "learning_rate": 9.38712698977291e-06, "loss": 1.6557, "step": 365},
    {"epoch": 0.732, "grad_norm": 9.90092658996582, "learning_rate": 9.381533400219319e-06, "loss": 1.7143, "step": 366},
    {"epoch": 0.734, "grad_norm": 15.684215545654297, "learning_rate": 9.375916082829341e-06, "loss": 1.6149, "step": 367},
    {"epoch": 0.736, "grad_norm": 8.9537935256958, "learning_rate": 9.370275068023097e-06, "loss": 1.5268, "step": 368},
    {"epoch": 0.738, "grad_norm": 10.356677055358887, "learning_rate": 9.364610386349048e-06, "loss": 1.7228, "step": 369},
    {"epoch": 0.74, "grad_norm": 10.680842399597168, "learning_rate": 9.358922068483813e-06, "loss": 1.762, "step": 370},
    {"epoch": 0.742, "grad_norm": 9.59675121307373, "learning_rate": 9.35321014523201e-06, "loss": 1.6749, "step": 371},
    {"epoch": 0.744, "grad_norm": 11.908790588378906, "learning_rate": 9.347474647526095e-06, "loss": 1.67, "step": 372},
    {"epoch": 0.746, "grad_norm": 10.074137687683105, "learning_rate": 9.34171560642619e-06, "loss": 1.6049, "step": 373},
    {"epoch": 0.748, "grad_norm": 9.519111633300781, "learning_rate": 9.335933053119906e-06, "loss": 1.7383, "step": 374},
    {"epoch": 0.75, "grad_norm": 11.124157905578613, "learning_rate": 9.330127018922195e-06, "loss": 1.5772, "step": 375},
    {"epoch": 0.752, "grad_norm": 9.715572357177734, "learning_rate": 9.324297535275156e-06, "loss": 1.6024, "step": 376},
    {"epoch": 0.754, "grad_norm": 14.774503707885742, "learning_rate": 9.318444633747884e-06, "loss": 1.614, "step": 377},
    {"epoch": 0.756, "grad_norm": 15.559951782226562, "learning_rate": 9.312568346036288e-06, "loss": 1.6546, "step": 378},
    {"epoch": 0.758, "grad_norm": 9.326455116271973, "learning_rate": 9.306668703962927e-06, "loss": 1.6567, "step": 379},
    {"epoch": 0.76, "grad_norm": 10.113893508911133, "learning_rate": 9.30074573947683e-06, "loss": 1.6268, "step": 380},
    {"epoch": 0.762, "grad_norm": 10.376815795898438, "learning_rate": 9.294799484653323e-06, "loss": 1.649, "step": 381},
    {"epoch": 0.764, "grad_norm": 12.478555679321289, "learning_rate": 9.288829971693869e-06, "loss": 1.5325, "step": 382},
    {"epoch": 0.766, "grad_norm": 8.862922668457031, "learning_rate": 9.282837232925876e-06, "loss": 1.5611, "step": 383},
    {"epoch": 0.768, "grad_norm": 9.65459156036377, "learning_rate": 9.276821300802535e-06, "loss": 1.6238, "step": 384},
    {"epoch": 0.77, "grad_norm": 10.751141548156738, "learning_rate": 9.27078220790263e-06, "loss": 1.629, "step": 385},
    {"epoch": 0.772, "grad_norm": 11.15700912475586, "learning_rate": 9.264719986930376e-06, "loss": 1.757, "step": 386},
    {"epoch": 0.774, "grad_norm": 11.11645221710205, "learning_rate": 9.25863467071524e-06, "loss": 1.7262, "step": 387},
    {"epoch": 0.776, "grad_norm": 9.191835403442383, "learning_rate": 9.25252629221175e-06, "loss": 1.5161, "step": 388},
    {"epoch": 0.778, "grad_norm": 8.911935806274414, "learning_rate": 9.246394884499334e-06, "loss": 1.6185, "step": 389},
    {"epoch": 0.78, "grad_norm": 10.155871391296387, "learning_rate": 9.24024048078213e-06, "loss": 1.6151, "step": 390},
    {"epoch": 0.782, "grad_norm": 7.377295970916748, "learning_rate": 9.234063114388809e-06, "loss": 1.5232, "step": 391},
    {"epoch": 0.784, "grad_norm": 10.363668441772461, "learning_rate": 9.227862818772392e-06, "loss": 1.6332, "step": 392},
    {"epoch": 0.786, "grad_norm": 8.449118614196777, "learning_rate": 9.221639627510076e-06, "loss": 1.5769, "step": 393},
    {"epoch": 0.788, "grad_norm": 8.908105850219727, "learning_rate": 9.215393574303043e-06, "loss": 1.7008, "step": 394},
    {"epoch": 0.79, "grad_norm": 9.45577335357666, "learning_rate": 9.209124692976287e-06, "loss": 1.6707, "step": 395},
    {"epoch": 0.792, "grad_norm": 9.608206748962402, "learning_rate": 9.202833017478421e-06, "loss": 1.6652, "step": 396},
    {"epoch": 0.794, "grad_norm": 9.177596092224121, "learning_rate": 9.196518581881502e-06, "loss": 1.7067, "step": 397},
    {"epoch": 0.796, "grad_norm": 5.568012714385986, "learning_rate": 9.190181420380838e-06, "loss": 1.6233, "step": 398},
    {"epoch": 0.798, "grad_norm": 10.35659122467041, "learning_rate": 9.18382156729481e-06, "loss": 1.6694, "step": 399},
    {"epoch": 0.8, "grad_norm": 5.523914813995361, "learning_rate": 9.177439057064684e-06, "loss": 1.6281, "step": 400},
    {"epoch": 0.802, "grad_norm": 7.155726432800293, "learning_rate": 9.171033924254421e-06, "loss": 1.6411, "step": 401},
    {"epoch": 0.804, "grad_norm": 11.25847339630127, "learning_rate": 9.164606203550498e-06, "loss": 1.5891, "step": 402},
    {"epoch": 0.806, "grad_norm": 12.162601470947266, "learning_rate": 9.15815592976171e-06, "loss": 1.5304, "step": 403},
    {"epoch": 0.808, "grad_norm": 9.000397682189941, "learning_rate": 9.151683137818989e-06, "loss": 1.6407, "step": 404},
    {"epoch": 0.81, "grad_norm": 8.760917663574219, "learning_rate": 9.145187862775208e-06, "loss": 1.6462, "step": 405},
|
{ |
|
"epoch": 0.812, |
|
"grad_norm": 9.060429573059082, |
|
"learning_rate": 9.138670139805004e-06, |
|
"loss": 1.6845, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.814, |
|
"grad_norm": 8.54736614227295, |
|
"learning_rate": 9.132130004204569e-06, |
|
"loss": 1.7051, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.816, |
|
"grad_norm": 10.724129676818848, |
|
"learning_rate": 9.125567491391476e-06, |
|
"loss": 1.6058, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.818, |
|
"grad_norm": 9.786903381347656, |
|
"learning_rate": 9.118982636904476e-06, |
|
"loss": 1.6181, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 9.084239959716797, |
|
"learning_rate": 9.112375476403313e-06, |
|
"loss": 1.7077, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.822, |
|
"grad_norm": 8.961618423461914, |
|
"learning_rate": 9.10574604566852e-06, |
|
"loss": 1.6431, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.824, |
|
"grad_norm": 8.100645065307617, |
|
"learning_rate": 9.099094380601244e-06, |
|
"loss": 1.6346, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.826, |
|
"grad_norm": 10.302638053894043, |
|
"learning_rate": 9.09242051722303e-06, |
|
"loss": 1.728, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.828, |
|
"grad_norm": 8.645998001098633, |
|
"learning_rate": 9.085724491675642e-06, |
|
"loss": 1.6298, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 6.636702060699463, |
|
"learning_rate": 9.079006340220862e-06, |
|
"loss": 1.7458, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 9.560812950134277, |
|
"learning_rate": 9.072266099240286e-06, |
|
"loss": 1.6252, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.834, |
|
"grad_norm": 8.170851707458496, |
|
"learning_rate": 9.065503805235139e-06, |
|
"loss": 1.5759, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.836, |
|
"grad_norm": 10.425228118896484, |
|
"learning_rate": 9.058719494826076e-06, |
|
"loss": 1.7297, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.838, |
|
"grad_norm": 8.824040412902832, |
|
"learning_rate": 9.051913204752972e-06, |
|
"loss": 1.7309, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 9.032853126525879, |
|
"learning_rate": 9.045084971874738e-06, |
|
"loss": 1.5986, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.842, |
|
"grad_norm": 8.494746208190918, |
|
"learning_rate": 9.03823483316911e-06, |
|
"loss": 1.6812, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.844, |
|
"grad_norm": 9.491890907287598, |
|
"learning_rate": 9.031362825732456e-06, |
|
"loss": 1.6809, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.846, |
|
"grad_norm": 10.895613670349121, |
|
"learning_rate": 9.02446898677957e-06, |
|
"loss": 1.6665, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.848, |
|
"grad_norm": 6.491031169891357, |
|
"learning_rate": 9.017553353643479e-06, |
|
"loss": 1.6723, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 8.402596473693848, |
|
"learning_rate": 9.01061596377522e-06, |
|
"loss": 1.5644, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.852, |
|
"grad_norm": 8.312848091125488, |
|
"learning_rate": 9.003656854743667e-06, |
|
"loss": 1.5704, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.854, |
|
"grad_norm": 10.550932884216309, |
|
"learning_rate": 8.996676064235308e-06, |
|
"loss": 1.6347, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.856, |
|
"grad_norm": 7.069148540496826, |
|
"learning_rate": 8.989673630054044e-06, |
|
"loss": 1.5883, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.858, |
|
"grad_norm": 8.47697639465332, |
|
"learning_rate": 8.982649590120982e-06, |
|
"loss": 1.6156, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 10.076146125793457, |
|
"learning_rate": 8.97560398247424e-06, |
|
"loss": 1.6816, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.862, |
|
"grad_norm": 9.539214134216309, |
|
"learning_rate": 8.96853684526873e-06, |
|
"loss": 1.6875, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.864, |
|
"grad_norm": 10.413458824157715, |
|
"learning_rate": 8.961448216775955e-06, |
|
"loss": 1.6266, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.866, |
|
"grad_norm": 9.216937065124512, |
|
"learning_rate": 8.954338135383804e-06, |
|
"loss": 1.6485, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.868, |
|
"grad_norm": 8.924684524536133, |
|
"learning_rate": 8.947206639596346e-06, |
|
"loss": 1.5875, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 9.03341007232666, |
|
"learning_rate": 8.94005376803361e-06, |
|
"loss": 1.562, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.872, |
|
"grad_norm": 7.471339225769043, |
|
"learning_rate": 8.932879559431392e-06, |
|
"loss": 1.5646, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.874, |
|
"grad_norm": 12.005831718444824, |
|
"learning_rate": 8.925684052641027e-06, |
|
"loss": 1.7528, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.876, |
|
"grad_norm": 7.764755725860596, |
|
"learning_rate": 8.9184672866292e-06, |
|
"loss": 1.6857, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.878, |
|
"grad_norm": 7.975648403167725, |
|
"learning_rate": 8.911229300477716e-06, |
|
"loss": 1.7224, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 7.063853740692139, |
|
"learning_rate": 8.903970133383297e-06, |
|
"loss": 1.67, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.882, |
|
"grad_norm": 9.282867431640625, |
|
"learning_rate": 8.896689824657371e-06, |
|
"loss": 1.5403, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.884, |
|
"grad_norm": 7.045912265777588, |
|
"learning_rate": 8.889388413725857e-06, |
|
"loss": 1.5697, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.886, |
|
"grad_norm": 7.684753894805908, |
|
"learning_rate": 8.882065940128946e-06, |
|
"loss": 1.6201, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.888, |
|
"grad_norm": 11.638023376464844, |
|
"learning_rate": 8.874722443520898e-06, |
|
"loss": 1.6565, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 8.81474781036377, |
|
"learning_rate": 8.867357963669821e-06, |
|
"loss": 1.6399, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.892, |
|
"grad_norm": 9.439010620117188, |
|
"learning_rate": 8.859972540457451e-06, |
|
"loss": 1.7231, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.894, |
|
"grad_norm": 8.762112617492676, |
|
"learning_rate": 8.852566213878947e-06, |
|
"loss": 1.5841, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 8.807167053222656, |
|
"learning_rate": 8.845139024042664e-06, |
|
"loss": 1.6177, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.898, |
|
"grad_norm": 7.984807014465332, |
|
"learning_rate": 8.837691011169944e-06, |
|
"loss": 1.6911, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 9.620468139648438, |
|
"learning_rate": 8.83022221559489e-06, |
|
"loss": 1.6385, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.902, |
|
"grad_norm": 8.629189491271973, |
|
"learning_rate": 8.822732677764158e-06, |
|
"loss": 1.6133, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.904, |
|
"grad_norm": 10.831542015075684, |
|
"learning_rate": 8.815222438236726e-06, |
|
"loss": 1.655, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.906, |
|
"grad_norm": 7.558828353881836, |
|
"learning_rate": 8.807691537683685e-06, |
|
"loss": 1.5384, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.908, |
|
"grad_norm": 10.0075044631958, |
|
"learning_rate": 8.800140016888009e-06, |
|
"loss": 1.6663, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 8.153804779052734, |
|
"learning_rate": 8.792567916744346e-06, |
|
"loss": 1.73, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.912, |
|
"grad_norm": 7.489542007446289, |
|
"learning_rate": 8.784975278258783e-06, |
|
"loss": 1.6016, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.914, |
|
"grad_norm": 6.917328357696533, |
|
"learning_rate": 8.777362142548636e-06, |
|
"loss": 1.6948, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.916, |
|
"grad_norm": 6.830845832824707, |
|
"learning_rate": 8.769728550842217e-06, |
|
"loss": 1.6222, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.918, |
|
"grad_norm": 7.163386821746826, |
|
"learning_rate": 8.762074544478622e-06, |
|
"loss": 1.6015, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 8.106656074523926, |
|
"learning_rate": 8.754400164907496e-06, |
|
"loss": 1.6755, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.922, |
|
"grad_norm": 8.358505249023438, |
|
"learning_rate": 8.746705453688815e-06, |
|
"loss": 1.5817, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.924, |
|
"grad_norm": 10.56636905670166, |
|
"learning_rate": 8.73899045249266e-06, |
|
"loss": 1.6605, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.926, |
|
"grad_norm": 8.793217658996582, |
|
"learning_rate": 8.73125520309899e-06, |
|
"loss": 1.6739, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.928, |
|
"grad_norm": 7.576605319976807, |
|
"learning_rate": 8.723499747397415e-06, |
|
"loss": 1.5703, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 9.332236289978027, |
|
"learning_rate": 8.715724127386971e-06, |
|
"loss": 1.5906, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.932, |
|
"grad_norm": 8.388971328735352, |
|
"learning_rate": 8.707928385175898e-06, |
|
"loss": 1.6549, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.934, |
|
"grad_norm": 7.761672019958496, |
|
"learning_rate": 8.700112562981398e-06, |
|
"loss": 1.5616, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.936, |
|
"grad_norm": 9.178117752075195, |
|
"learning_rate": 8.692276703129421e-06, |
|
"loss": 1.7963, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.938, |
|
"grad_norm": 6.7894287109375, |
|
"learning_rate": 8.68442084805442e-06, |
|
"loss": 1.579, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 8.14854907989502, |
|
"learning_rate": 8.676545040299145e-06, |
|
"loss": 1.7024, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.942, |
|
"grad_norm": 8.014143943786621, |
|
"learning_rate": 8.668649322514382e-06, |
|
"loss": 1.6969, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.944, |
|
"grad_norm": 7.022289752960205, |
|
"learning_rate": 8.660733737458751e-06, |
|
"loss": 1.5763, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.946, |
|
"grad_norm": 7.455463886260986, |
|
"learning_rate": 8.652798327998458e-06, |
|
"loss": 1.6751, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.948, |
|
"grad_norm": 10.453346252441406, |
|
"learning_rate": 8.644843137107058e-06, |
|
"loss": 1.6393, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 10.011568069458008, |
|
"learning_rate": 8.636868207865244e-06, |
|
"loss": 1.6452, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.952, |
|
"grad_norm": 10.156243324279785, |
|
"learning_rate": 8.628873583460593e-06, |
|
"loss": 1.6624, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.954, |
|
"grad_norm": 8.829718589782715, |
|
"learning_rate": 8.620859307187339e-06, |
|
"loss": 1.6908, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.956, |
|
"grad_norm": 10.530731201171875, |
|
"learning_rate": 8.61282542244614e-06, |
|
"loss": 1.6371, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.958, |
|
"grad_norm": 9.92332935333252, |
|
"learning_rate": 8.604771972743848e-06, |
|
"loss": 1.6542, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 6.439817905426025, |
|
"learning_rate": 8.596699001693257e-06, |
|
"loss": 1.7083, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.962, |
|
"grad_norm": 7.941191673278809, |
|
"learning_rate": 8.588606553012884e-06, |
|
"loss": 1.5819, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.964, |
|
"grad_norm": 7.217868328094482, |
|
"learning_rate": 8.580494670526725e-06, |
|
"loss": 1.7362, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.966, |
|
"grad_norm": 8.529379844665527, |
|
"learning_rate": 8.572363398164017e-06, |
|
"loss": 1.7196, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.968, |
|
"grad_norm": 8.774826049804688, |
|
"learning_rate": 8.564212779959003e-06, |
|
"loss": 1.6536, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 8.855049133300781, |
|
"learning_rate": 8.556042860050686e-06, |
|
"loss": 1.6759, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.972, |
|
"grad_norm": 7.4477620124816895, |
|
"learning_rate": 8.547853682682605e-06, |
|
"loss": 1.6095, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.974, |
|
"grad_norm": 8.155046463012695, |
|
"learning_rate": 8.539645292202579e-06, |
|
"loss": 1.6397, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.976, |
|
"grad_norm": 9.956945419311523, |
|
"learning_rate": 8.531417733062476e-06, |
|
"loss": 1.5748, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.978, |
|
"grad_norm": 9.320968627929688, |
|
"learning_rate": 8.523171049817974e-06, |
|
"loss": 1.6588, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 7.276108264923096, |
|
"learning_rate": 8.51490528712831e-06, |
|
"loss": 1.6456, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.982, |
|
"grad_norm": 11.987957954406738, |
|
"learning_rate": 8.506620489756045e-06, |
|
"loss": 1.6061, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.984, |
|
"grad_norm": 9.098200798034668, |
|
"learning_rate": 8.498316702566828e-06, |
|
"loss": 1.6903, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.986, |
|
"grad_norm": 7.814845561981201, |
|
"learning_rate": 8.489993970529137e-06, |
|
"loss": 1.4855, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.988, |
|
"grad_norm": 7.096703052520752, |
|
"learning_rate": 8.481652338714048e-06, |
|
"loss": 1.6221, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 6.307745456695557, |
|
"learning_rate": 8.473291852294986e-06, |
|
"loss": 1.5903, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.992, |
|
"grad_norm": 7.393770694732666, |
|
"learning_rate": 8.464912556547486e-06, |
|
"loss": 1.5778, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.994, |
|
"grad_norm": 21.961185455322266, |
|
"learning_rate": 8.456514496848938e-06, |
|
"loss": 1.7559, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.996, |
|
"grad_norm": 12.971484184265137, |
|
"learning_rate": 8.44809771867835e-06, |
|
"loss": 1.6592, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.998, |
|
"grad_norm": 12.674059867858887, |
|
"learning_rate": 8.439662267616093e-06, |
|
"loss": 1.6422, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 10.149175643920898, |
|
"learning_rate": 8.43120818934367e-06, |
|
"loss": 1.6264, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.002, |
|
"grad_norm": 14.11556625366211, |
|
"learning_rate": 8.422735529643445e-06, |
|
"loss": 1.5558, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.004, |
|
"grad_norm": 10.902586936950684, |
|
"learning_rate": 8.414244334398418e-06, |
|
"loss": 1.6304, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.006, |
|
"grad_norm": 9.201044082641602, |
|
"learning_rate": 8.405734649591964e-06, |
|
"loss": 1.7151, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.008, |
|
"grad_norm": 8.613140106201172, |
|
"learning_rate": 8.397206521307584e-06, |
|
"loss": 1.6446, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 11.33934497833252, |
|
"learning_rate": 8.388659995728662e-06, |
|
"loss": 1.5727, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.012, |
|
"grad_norm": 9.204126358032227, |
|
"learning_rate": 8.380095119138209e-06, |
|
"loss": 1.6427, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.014, |
|
"grad_norm": 13.722872734069824, |
|
"learning_rate": 8.371511937918616e-06, |
|
"loss": 1.5922, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.016, |
|
"grad_norm": 9.983489036560059, |
|
"learning_rate": 8.362910498551402e-06, |
|
"loss": 1.5911, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.018, |
|
"grad_norm": 6.286109447479248, |
|
"learning_rate": 8.354290847616954e-06, |
|
"loss": 1.581, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 8.981342315673828, |
|
"learning_rate": 8.345653031794292e-06, |
|
"loss": 1.6099, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.022, |
|
"grad_norm": 10.48516845703125, |
|
"learning_rate": 8.3369970978608e-06, |
|
"loss": 1.4918, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 10.683873176574707, |
|
"learning_rate": 8.328323092691985e-06, |
|
"loss": 1.7634, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.026, |
|
"grad_norm": 9.128839492797852, |
|
"learning_rate": 8.319631063261209e-06, |
|
"loss": 1.5248, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.028, |
|
"grad_norm": 7.288445949554443, |
|
"learning_rate": 8.310921056639451e-06, |
|
"loss": 1.597, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 7.43460750579834, |
|
"learning_rate": 8.302193119995038e-06, |
|
"loss": 1.469, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.032, |
|
"grad_norm": 11.071965217590332, |
|
"learning_rate": 8.293447300593402e-06, |
|
"loss": 1.6112, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.034, |
|
"grad_norm": 9.424286842346191, |
|
"learning_rate": 8.284683645796814e-06, |
|
"loss": 1.6145, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.036, |
|
"grad_norm": 10.420470237731934, |
|
"learning_rate": 8.275902203064125e-06, |
|
"loss": 1.7171, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.038, |
|
"grad_norm": 9.002021789550781, |
|
"learning_rate": 8.267103019950529e-06, |
|
"loss": 1.603, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 12.653961181640625, |
|
"learning_rate": 8.258286144107277e-06, |
|
"loss": 1.6203, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.042, |
|
"grad_norm": 7.975899696350098, |
|
"learning_rate": 8.249451623281444e-06, |
|
"loss": 1.5552, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.044, |
|
"grad_norm": 7.326491832733154, |
|
"learning_rate": 8.240599505315656e-06, |
|
"loss": 1.6063, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.046, |
|
"grad_norm": 7.846243381500244, |
|
"learning_rate": 8.231729838147833e-06, |
|
"loss": 1.5635, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.048, |
|
"grad_norm": 9.442638397216797, |
|
"learning_rate": 8.222842669810936e-06, |
|
"loss": 1.5853, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 8.490560531616211, |
|
"learning_rate": 8.213938048432697e-06, |
|
"loss": 1.5719, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.052, |
|
"grad_norm": 9.718538284301758, |
|
"learning_rate": 8.205016022235368e-06, |
|
"loss": 1.6211, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.054, |
|
"grad_norm": 8.703619956970215, |
|
"learning_rate": 8.196076639535453e-06, |
|
"loss": 1.6081, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.056, |
|
"grad_norm": 11.14815902709961, |
|
"learning_rate": 8.18711994874345e-06, |
|
"loss": 1.6158, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.058, |
|
"grad_norm": 9.741596221923828, |
|
"learning_rate": 8.178145998363585e-06, |
|
"loss": 1.5314, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 9.530781745910645, |
|
"learning_rate": 8.16915483699355e-06, |
|
"loss": 1.5866, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.062, |
|
"grad_norm": 8.731345176696777, |
|
"learning_rate": 8.160146513324256e-06, |
|
"loss": 1.5666, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.064, |
|
"grad_norm": 11.338554382324219, |
|
"learning_rate": 8.151121076139534e-06, |
|
"loss": 1.5834, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.066, |
|
"grad_norm": 20.54311752319336, |
|
"learning_rate": 8.142078574315907e-06, |
|
"loss": 1.5652, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.068, |
|
"grad_norm": 11.977243423461914, |
|
"learning_rate": 8.133019056822303e-06, |
|
"loss": 1.4832, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 10.514939308166504, |
|
"learning_rate": 8.123942572719801e-06, |
|
"loss": 1.6011, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.072, |
|
"grad_norm": 9.276664733886719, |
|
"learning_rate": 8.11484917116136e-06, |
|
"loss": 1.5909, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.074, |
|
"grad_norm": 11.073919296264648, |
|
"learning_rate": 8.105738901391553e-06, |
|
"loss": 1.5826, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.076, |
|
"grad_norm": 9.078001022338867, |
|
"learning_rate": 8.096611812746302e-06, |
|
"loss": 1.641, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.078, |
|
"grad_norm": 10.105594635009766, |
|
"learning_rate": 8.087467954652608e-06, |
|
"loss": 1.6196, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 13.397587776184082, |
|
"learning_rate": 8.078307376628292e-06, |
|
"loss": 1.562, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.082, |
|
"grad_norm": 13.389184951782227, |
|
"learning_rate": 8.069130128281714e-06, |
|
"loss": 1.5884, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.084, |
|
"grad_norm": 10.954330444335938, |
|
"learning_rate": 8.059936259311514e-06, |
|
"loss": 1.6104, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.086, |
|
"grad_norm": 8.864795684814453, |
|
"learning_rate": 8.05072581950634e-06, |
|
"loss": 1.6024, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 9.769396781921387, |
|
"learning_rate": 8.041498858744572e-06, |
|
"loss": 1.553, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 8.69478988647461, |
|
"learning_rate": 8.032255426994069e-06, |
|
"loss": 1.6915, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.092, |
|
"grad_norm": 8.354500770568848, |
|
"learning_rate": 8.022995574311876e-06, |
|
"loss": 1.5773, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.094, |
|
"grad_norm": 9.368426322937012, |
|
"learning_rate": 8.013719350843969e-06, |
|
"loss": 1.6233, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.096, |
|
"grad_norm": 8.086129188537598, |
|
"learning_rate": 8.004426806824985e-06, |
|
"loss": 1.4905, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.098, |
|
"grad_norm": 8.991674423217773, |
|
"learning_rate": 7.99511799257793e-06, |
|
"loss": 1.4947, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 9.440938949584961, |
|
"learning_rate": 7.985792958513932e-06, |
|
"loss": 1.6062, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.102, |
|
"grad_norm": 7.645297527313232, |
|
"learning_rate": 7.97645175513195e-06, |
|
"loss": 1.5178, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.104, |
|
"grad_norm": 8.402973175048828, |
|
"learning_rate": 7.967094433018508e-06, |
|
"loss": 1.571, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.106, |
|
"grad_norm": 9.366331100463867, |
|
"learning_rate": 7.95772104284742e-06, |
|
"loss": 1.4459, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.108, |
|
"grad_norm": 9.664446830749512, |
|
"learning_rate": 7.948331635379517e-06, |
|
"loss": 1.5755, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 10.556148529052734, |
|
"learning_rate": 7.938926261462366e-06, |
|
"loss": 1.5738, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.112, |
|
"grad_norm": 10.46329116821289, |
|
"learning_rate": 7.929504972030003e-06, |
|
"loss": 1.7149, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.114, |
|
"grad_norm": 9.829599380493164, |
|
"learning_rate": 7.920067818102652e-06, |
|
"loss": 1.5629, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.116, |
|
"grad_norm": 8.370344161987305, |
|
"learning_rate": 7.910614850786448e-06, |
|
"loss": 1.6013, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.1179999999999999, |
|
"grad_norm": 9.646671295166016, |
|
"learning_rate": 7.901146121273165e-06, |
|
"loss": 1.6073, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 8.669224739074707, |
|
"learning_rate": 7.891661680839932e-06, |
|
"loss": 1.5163, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.1219999999999999, |
|
"grad_norm": 11.22138500213623, |
|
"learning_rate": 7.882161580848966e-06, |
|
"loss": 1.6723, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.124, |
|
"grad_norm": 9.086870193481445, |
|
"learning_rate": 7.872645872747281e-06, |
|
"loss": 1.4885, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.126, |
|
"grad_norm": 11.861745834350586, |
|
"learning_rate": 7.863114608066417e-06, |
|
"loss": 1.6129, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.1280000000000001, |
|
"grad_norm": 7.978770732879639, |
|
"learning_rate": 7.85356783842216e-06, |
|
"loss": 1.6875, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 9.279024124145508, |
|
"learning_rate": 7.84400561551426e-06, |
|
"loss": 1.5874, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.1320000000000001, |
|
"grad_norm": 10.196725845336914, |
|
"learning_rate": 7.834427991126155e-06, |
|
"loss": 1.5669, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.134, |
|
"grad_norm": 12.994475364685059, |
|
"learning_rate": 7.82483501712469e-06, |
|
"loss": 1.6626, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.1360000000000001, |
|
"grad_norm": 11.740025520324707, |
|
"learning_rate": 7.815226745459831e-06, |
|
"loss": 1.5378, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.138, |
|
"grad_norm": 8.01264762878418, |
|
"learning_rate": 7.80560322816439e-06, |
|
"loss": 1.592, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.1400000000000001, |
|
"grad_norm": 9.302233695983887, |
|
"learning_rate": 7.795964517353734e-06, |
|
"loss": 1.5832, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.142, |
|
"grad_norm": 9.458878517150879, |
|
"learning_rate": 7.786310665225522e-06, |
|
"loss": 1.6201, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.144, |
|
"grad_norm": 19.037525177001953, |
|
"learning_rate": 7.776641724059398e-06, |
|
"loss": 1.7447, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.146, |
|
"grad_norm": 11.674372673034668, |
|
"learning_rate": 7.76695774621672e-06, |
|
"loss": 1.6556, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.148, |
|
"grad_norm": 11.437299728393555, |
|
"learning_rate": 7.757258784140286e-06, |
|
"loss": 1.6109, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 10.853487014770508, |
|
"learning_rate": 7.747544890354031e-06, |
|
"loss": 1.5528, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 6.265218734741211, |
|
"learning_rate": 7.737816117462752e-06, |
|
"loss": 1.5691, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.154, |
|
"grad_norm": 8.988601684570312, |
|
"learning_rate": 7.728072518151826e-06, |
|
"loss": 1.646, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.156, |
|
"grad_norm": 10.684486389160156, |
|
"learning_rate": 7.718314145186918e-06, |
|
"loss": 1.6239, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.158, |
|
"grad_norm": 8.397745132446289, |
|
"learning_rate": 7.7085410514137e-06, |
|
"loss": 1.5766, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 8.635762214660645, |
|
"learning_rate": 7.698753289757565e-06, |
|
"loss": 1.5302, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.162, |
|
"grad_norm": 11.5667085647583, |
|
"learning_rate": 7.688950913223336e-06, |
|
"loss": 1.5859, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.164, |
|
"grad_norm": 6.988083839416504, |
|
"learning_rate": 7.679133974894984e-06, |
|
"loss": 1.6023, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.166, |
|
"grad_norm": 8.85438346862793, |
|
"learning_rate": 7.669302527935334e-06, |
|
"loss": 1.7383, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.168, |
|
"grad_norm": 8.871450424194336, |
|
"learning_rate": 7.65945662558579e-06, |
|
"loss": 1.5443, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 6.740708827972412, |
|
"learning_rate": 7.649596321166024e-06, |
|
"loss": 1.581, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.172, |
|
"grad_norm": 7.056929111480713, |
|
"learning_rate": 7.639721668073718e-06, |
|
"loss": 1.4831, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.174, |
|
"grad_norm": 9.177947998046875, |
|
"learning_rate": 7.629832719784245e-06, |
|
"loss": 1.5719, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.176, |
|
"grad_norm": 9.50424861907959, |
|
"learning_rate": 7.619929529850397e-06, |
|
"loss": 1.5951, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.178, |
|
"grad_norm": 7.7955498695373535, |
|
"learning_rate": 7.610012151902091e-06, |
|
"loss": 1.6338, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 9.096726417541504, |
|
"learning_rate": 7.600080639646077e-06, |
|
"loss": 1.6591, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.182, |
|
"grad_norm": 8.232474327087402, |
|
"learning_rate": 7.590135046865652e-06, |
|
"loss": 1.4726, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.184, |
|
"grad_norm": 8.038969039916992, |
|
"learning_rate": 7.580175427420358e-06, |
|
"loss": 1.7216, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.186, |
|
"grad_norm": 10.49866771697998, |
|
"learning_rate": 7.570201835245703e-06, |
|
"loss": 1.55, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.188, |
|
"grad_norm": 8.377806663513184, |
|
"learning_rate": 7.560214324352858e-06, |
|
"loss": 1.5407, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 11.32930850982666, |
|
"learning_rate": 7.550212948828377e-06, |
|
"loss": 1.5192, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.192, |
|
"grad_norm": 9.1795015335083, |
|
"learning_rate": 7.54019776283389e-06, |
|
"loss": 1.5734, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.194, |
|
"grad_norm": 8.019397735595703, |
|
"learning_rate": 7.530168820605819e-06, |
|
"loss": 1.5216, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.196, |
|
"grad_norm": 25.26226234436035, |
|
"learning_rate": 7.520126176455084e-06, |
|
"loss": 1.6976, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.198, |
|
"grad_norm": 9.354122161865234, |
|
"learning_rate": 7.510069884766802e-06, |
|
"loss": 1.5146, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 6.69480037689209, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.5678, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.202, |
|
"grad_norm": 8.49975299835205, |
|
"learning_rate": 7.489916576687318e-06, |
|
"loss": 1.5507, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.204, |
|
"grad_norm": 6.0479416847229, |
|
"learning_rate": 7.479819669434712e-06, |
|
"loss": 1.5298, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.206, |
|
"grad_norm": 12.379949569702148, |
|
"learning_rate": 7.469709332921155e-06, |
|
"loss": 1.5703, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.208, |
|
"grad_norm": 9.304234504699707, |
|
"learning_rate": 7.459585621898353e-06, |
|
"loss": 1.4965, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 8.749626159667969, |
|
"learning_rate": 7.449448591190436e-06, |
|
"loss": 1.5667, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.212, |
|
"grad_norm": 12.647852897644043, |
|
"learning_rate": 7.4392982956936644e-06, |
|
"loss": 1.518, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.214, |
|
"grad_norm": 11.38586139678955, |
|
"learning_rate": 7.429134790376136e-06, |
|
"loss": 1.5827, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 9.004302978515625, |
|
"learning_rate": 7.418958130277483e-06, |
|
"loss": 1.5788, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.218, |
|
"grad_norm": 12.129622459411621, |
|
"learning_rate": 7.408768370508577e-06, |
|
"loss": 1.6698, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 8.174181938171387, |
|
"learning_rate": 7.398565566251232e-06, |
|
"loss": 1.6104, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.222, |
|
"grad_norm": 9.542710304260254, |
|
"learning_rate": 7.3883497727579e-06, |
|
"loss": 1.527, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.224, |
|
"grad_norm": 8.519256591796875, |
|
"learning_rate": 7.378121045351378e-06, |
|
"loss": 1.6646, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.226, |
|
"grad_norm": 10.969525337219238, |
|
"learning_rate": 7.3678794394245e-06, |
|
"loss": 1.5838, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.228, |
|
"grad_norm": 12.213068008422852, |
|
"learning_rate": 7.357625010439853e-06, |
|
"loss": 1.5068, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 14.603283882141113, |
|
"learning_rate": 7.347357813929455e-06, |
|
"loss": 1.7072, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.232, |
|
"grad_norm": 9.047625541687012, |
|
"learning_rate": 7.337077905494472e-06, |
|
"loss": 1.5516, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.234, |
|
"grad_norm": 8.961601257324219, |
|
"learning_rate": 7.326785340804908e-06, |
|
"loss": 1.6053, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.236, |
|
"grad_norm": 10.050729751586914, |
|
"learning_rate": 7.31648017559931e-06, |
|
"loss": 1.5849, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.238, |
|
"grad_norm": 6.469438076019287, |
|
"learning_rate": 7.3061624656844544e-06, |
|
"loss": 1.6075, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 11.897672653198242, |
|
"learning_rate": 7.295832266935059e-06, |
|
"loss": 1.5038, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.242, |
|
"grad_norm": 7.534898281097412, |
|
"learning_rate": 7.285489635293472e-06, |
|
"loss": 1.5344, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.244, |
|
"grad_norm": 8.457494735717773, |
|
"learning_rate": 7.275134626769369e-06, |
|
"loss": 1.6063, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.246, |
|
"grad_norm": 7.499537467956543, |
|
"learning_rate": 7.264767297439455e-06, |
|
"loss": 1.5974, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.248, |
|
"grad_norm": 10.001435279846191, |
|
"learning_rate": 7.254387703447154e-06, |
|
"loss": 1.6185, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 10.439517974853516, |
|
"learning_rate": 7.243995901002312e-06, |
|
"loss": 1.6371, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.252, |
|
"grad_norm": 8.107089042663574, |
|
"learning_rate": 7.233591946380884e-06, |
|
"loss": 1.5729, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.254, |
|
"grad_norm": 9.131059646606445, |
|
"learning_rate": 7.223175895924638e-06, |
|
"loss": 1.5677, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.256, |
|
"grad_norm": 6.757850646972656, |
|
"learning_rate": 7.212747806040845e-06, |
|
"loss": 1.5922, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.258, |
|
"grad_norm": 9.472972869873047, |
|
"learning_rate": 7.2023077332019755e-06, |
|
"loss": 1.6055, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 7.784339427947998, |
|
"learning_rate": 7.191855733945388e-06, |
|
"loss": 1.461, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.262, |
|
"grad_norm": 9.098430633544922, |
|
"learning_rate": 7.181391864873034e-06, |
|
"loss": 1.5214, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.264, |
|
"grad_norm": 8.37144947052002, |
|
"learning_rate": 7.170916182651141e-06, |
|
"loss": 1.5766, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.266, |
|
"grad_norm": 9.366813659667969, |
|
"learning_rate": 7.160428744009913e-06, |
|
"loss": 1.5955, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.268, |
|
"grad_norm": 11.189432144165039, |
|
"learning_rate": 7.149929605743214e-06, |
|
"loss": 1.6196, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 9.027175903320312, |
|
"learning_rate": 7.1394188247082715e-06, |
|
"loss": 1.5104, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.272, |
|
"grad_norm": 7.792656898498535, |
|
"learning_rate": 7.128896457825364e-06, |
|
"loss": 1.6022, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.274, |
|
"grad_norm": 7.127891540527344, |
|
"learning_rate": 7.118362562077508e-06, |
|
"loss": 1.624, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.276, |
|
"grad_norm": 7.897225379943848, |
|
"learning_rate": 7.107817194510157e-06, |
|
"loss": 1.6961, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.278, |
|
"grad_norm": 8.84518051147461, |
|
"learning_rate": 7.0972604122308865e-06, |
|
"loss": 1.4578, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 7.849598407745361, |
|
"learning_rate": 7.08669227240909e-06, |
|
"loss": 1.6101, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.282, |
|
"grad_norm": 9.6469144821167, |
|
"learning_rate": 7.076112832275667e-06, |
|
"loss": 1.6093, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.284, |
|
"grad_norm": 5.705495834350586, |
|
"learning_rate": 7.06552214912271e-06, |
|
"loss": 1.5104, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.286, |
|
"grad_norm": 7.658092975616455, |
|
"learning_rate": 7.054920280303199e-06, |
|
"loss": 1.666, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.288, |
|
"grad_norm": 7.542886734008789, |
|
"learning_rate": 7.04430728323069e-06, |
|
"loss": 1.6318, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 9.100532531738281, |
|
"learning_rate": 7.033683215379002e-06, |
|
"loss": 1.6983, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.292, |
|
"grad_norm": 7.658532619476318, |
|
"learning_rate": 7.023048134281907e-06, |
|
"loss": 1.5868, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.294, |
|
"grad_norm": 10.516380310058594, |
|
"learning_rate": 7.012402097532815e-06, |
|
"loss": 1.489, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.296, |
|
"grad_norm": 7.941004276275635, |
|
"learning_rate": 7.0017451627844765e-06, |
|
"loss": 1.6114, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.298, |
|
"grad_norm": 8.073655128479004, |
|
"learning_rate": 6.991077387748643e-06, |
|
"loss": 1.6023, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 8.82116985321045, |
|
"learning_rate": 6.980398830195785e-06, |
|
"loss": 1.5371, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.302, |
|
"grad_norm": 10.482192039489746, |
|
"learning_rate": 6.9697095479547564e-06, |
|
"loss": 1.6303, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.304, |
|
"grad_norm": 12.892633438110352, |
|
"learning_rate": 6.959009598912493e-06, |
|
"loss": 1.4729, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.306, |
|
"grad_norm": 7.209946632385254, |
|
"learning_rate": 6.948299041013695e-06, |
|
"loss": 1.493, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.308, |
|
"grad_norm": 7.52318811416626, |
|
"learning_rate": 6.9375779322605154e-06, |
|
"loss": 1.5806, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 10.265365600585938, |
|
"learning_rate": 6.9268463307122425e-06, |
|
"loss": 1.5159, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.312, |
|
"grad_norm": 10.420729637145996, |
|
"learning_rate": 6.916104294484988e-06, |
|
"loss": 1.5097, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.314, |
|
"grad_norm": 7.465059280395508, |
|
"learning_rate": 6.905351881751372e-06, |
|
"loss": 1.6499, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.316, |
|
"grad_norm": 9.007606506347656, |
|
"learning_rate": 6.8945891507402075e-06, |
|
"loss": 1.5783, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.318, |
|
"grad_norm": 11.8942289352417, |
|
"learning_rate": 6.883816159736187e-06, |
|
"loss": 1.6481, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 10.41662311553955, |
|
"learning_rate": 6.873032967079562e-06, |
|
"loss": 1.6615, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.322, |
|
"grad_norm": 11.450822830200195, |
|
"learning_rate": 6.862239631165831e-06, |
|
"loss": 1.6254, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.324, |
|
"grad_norm": 8.604270935058594, |
|
"learning_rate": 6.851436210445427e-06, |
|
"loss": 1.6191, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.326, |
|
"grad_norm": 7.488931655883789, |
|
"learning_rate": 6.840622763423391e-06, |
|
"loss": 1.6588, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.328, |
|
"grad_norm": 9.091219902038574, |
|
"learning_rate": 6.829799348659061e-06, |
|
"loss": 1.6639, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 8.698543548583984, |
|
"learning_rate": 6.818966024765758e-06, |
|
"loss": 1.5772, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.332, |
|
"grad_norm": 13.658153533935547, |
|
"learning_rate": 6.808122850410461e-06, |
|
"loss": 1.4672, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.334, |
|
"grad_norm": 26.972639083862305, |
|
"learning_rate": 6.7972698843135e-06, |
|
"loss": 1.6459, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.336, |
|
"grad_norm": 9.072081565856934, |
|
"learning_rate": 6.7864071852482205e-06, |
|
"loss": 1.6711, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.338, |
|
"grad_norm": 8.552204132080078, |
|
"learning_rate": 6.775534812040686e-06, |
|
"loss": 1.5927, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 9.554261207580566, |
|
"learning_rate": 6.7646528235693445e-06, |
|
"loss": 1.6078, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.342, |
|
"grad_norm": 9.100646018981934, |
|
"learning_rate": 6.753761278764719e-06, |
|
"loss": 1.6746, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 10.204363822937012, |
|
"learning_rate": 6.7428602366090764e-06, |
|
"loss": 1.626, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.346, |
|
"grad_norm": 10.063047409057617, |
|
"learning_rate": 6.7319497561361245e-06, |
|
"loss": 1.6601, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.3479999999999999, |
|
"grad_norm": 13.726625442504883, |
|
"learning_rate": 6.721029896430678e-06, |
|
"loss": 1.583, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 11.784052848815918, |
|
"learning_rate": 6.710100716628345e-06, |
|
"loss": 1.6688, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.3519999999999999, |
|
"grad_norm": 8.559849739074707, |
|
"learning_rate": 6.699162275915208e-06, |
|
"loss": 1.6578, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.354, |
|
"grad_norm": 9.396211624145508, |
|
"learning_rate": 6.6882146335274955e-06, |
|
"loss": 1.6294, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.3559999999999999, |
|
"grad_norm": 7.74012565612793, |
|
"learning_rate": 6.677257848751276e-06, |
|
"loss": 1.5466, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.358, |
|
"grad_norm": 10.365443229675293, |
|
"learning_rate": 6.666291980922122e-06, |
|
"loss": 1.5296, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.3599999999999999, |
|
"grad_norm": 7.977329254150391, |
|
"learning_rate": 6.655317089424791e-06, |
|
"loss": 1.5837, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.362, |
|
"grad_norm": 8.795341491699219, |
|
"learning_rate": 6.644333233692917e-06, |
|
"loss": 1.5884, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.3639999999999999, |
|
"grad_norm": 13.247668266296387, |
|
"learning_rate": 6.633340473208673e-06, |
|
"loss": 1.5977, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.366, |
|
"grad_norm": 8.479852676391602, |
|
"learning_rate": 6.622338867502452e-06, |
|
"loss": 1.5983, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.3679999999999999, |
|
"grad_norm": 8.384832382202148, |
|
"learning_rate": 6.611328476152557e-06, |
|
"loss": 1.5377, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 7.661881923675537, |
|
"learning_rate": 6.600309358784858e-06, |
|
"loss": 1.4785, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.3719999999999999, |
|
"grad_norm": 9.14194107055664, |
|
"learning_rate": 6.58928157507249e-06, |
|
"loss": 1.5439, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.374, |
|
"grad_norm": 8.849069595336914, |
|
"learning_rate": 6.578245184735513e-06, |
|
"loss": 1.5466, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.376, |
|
"grad_norm": 8.54023551940918, |
|
"learning_rate": 6.567200247540599e-06, |
|
"loss": 1.5738, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.3780000000000001, |
|
"grad_norm": 13.907352447509766, |
|
"learning_rate": 6.556146823300701e-06, |
|
"loss": 1.6541, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 10.435159683227539, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 1.6579, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.3820000000000001, |
|
"grad_norm": 9.772438049316406, |
|
"learning_rate": 6.534014753167263e-06, |
|
"loss": 1.6995, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.384, |
|
"grad_norm": 9.34780502319336, |
|
"learning_rate": 6.522936227128139e-06, |
|
"loss": 1.5157, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.3860000000000001, |
|
"grad_norm": 8.92451000213623, |
|
"learning_rate": 6.5118494537522235e-06, |
|
"loss": 1.6148, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.388, |
|
"grad_norm": 6.892347812652588, |
|
"learning_rate": 6.500754493079029e-06, |
|
"loss": 1.5511, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.3900000000000001, |
|
"grad_norm": 10.89332389831543, |
|
"learning_rate": 6.48965140519241e-06, |
|
"loss": 1.5804, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.392, |
|
"grad_norm": 7.6322221755981445, |
|
"learning_rate": 6.4785402502202345e-06, |
|
"loss": 1.6086, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.3940000000000001, |
|
"grad_norm": 8.542652130126953, |
|
"learning_rate": 6.467421088334052e-06, |
|
"loss": 1.6496, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.396, |
|
"grad_norm": 8.015932083129883, |
|
"learning_rate": 6.456293979748778e-06, |
|
"loss": 1.617, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.3980000000000001, |
|
"grad_norm": 7.57454776763916, |
|
"learning_rate": 6.445158984722358e-06, |
|
"loss": 1.567, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 8.950959205627441, |
|
"learning_rate": 6.434016163555452e-06, |
|
"loss": 1.5682, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.4020000000000001, |
|
"grad_norm": 8.991364479064941, |
|
"learning_rate": 6.422865576591096e-06, |
|
"loss": 1.5012, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.404, |
|
"grad_norm": 7.7304182052612305, |
|
"learning_rate": 6.411707284214384e-06, |
|
"loss": 1.5961, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.4060000000000001, |
|
"grad_norm": 7.197605609893799, |
|
"learning_rate": 6.400541346852136e-06, |
|
"loss": 1.6654, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 7.552187442779541, |
|
"learning_rate": 6.389367824972575e-06, |
|
"loss": 1.5137, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 8.499285697937012, |
|
"learning_rate": 6.378186779084996e-06, |
|
"loss": 1.5841, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.412, |
|
"grad_norm": 9.511728286743164, |
|
"learning_rate": 6.366998269739442e-06, |
|
"loss": 1.5504, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.414, |
|
"grad_norm": 8.45433521270752, |
|
"learning_rate": 6.35580235752637e-06, |
|
"loss": 1.5983, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.416, |
|
"grad_norm": 9.685602188110352, |
|
"learning_rate": 6.344599103076329e-06, |
|
"loss": 1.5785, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.418, |
|
"grad_norm": 9.533397674560547, |
|
"learning_rate": 6.3333885670596285e-06, |
|
"loss": 1.5921, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 10.1636381149292, |
|
"learning_rate": 6.322170810186013e-06, |
|
"loss": 1.6896, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.422, |
|
"grad_norm": 9.969033241271973, |
|
"learning_rate": 6.310945893204324e-06, |
|
"loss": 1.6277, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.424, |
|
"grad_norm": 13.378399848937988, |
|
"learning_rate": 6.299713876902188e-06, |
|
"loss": 1.6618, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.426, |
|
"grad_norm": 7.862060546875, |
|
"learning_rate": 6.28847482210567e-06, |
|
"loss": 1.5287, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.428, |
|
"grad_norm": 8.11763858795166, |
|
"learning_rate": 6.277228789678953e-06, |
|
"loss": 1.6417, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 8.063044548034668, |
|
"learning_rate": 6.26597584052401e-06, |
|
"loss": 1.6309, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.432, |
|
"grad_norm": 6.405232906341553, |
|
"learning_rate": 6.254716035580264e-06, |
|
"loss": 1.5913, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.434, |
|
"grad_norm": 6.900762557983398, |
|
"learning_rate": 6.243449435824276e-06, |
|
"loss": 1.5563, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.436, |
|
"grad_norm": 10.574906349182129, |
|
"learning_rate": 6.23217610226939e-06, |
|
"loss": 1.601, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.438, |
|
"grad_norm": 7.271671295166016, |
|
"learning_rate": 6.220896095965428e-06, |
|
"loss": 1.5809, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 8.701942443847656, |
|
"learning_rate": 6.209609477998339e-06, |
|
"loss": 1.588, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.442, |
|
"grad_norm": 8.735617637634277, |
|
"learning_rate": 6.198316309489886e-06, |
|
"loss": 1.5435, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.444, |
|
"grad_norm": 7.150242328643799, |
|
"learning_rate": 6.187016651597299e-06, |
|
"loss": 1.6309, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.446, |
|
"grad_norm": 9.113722801208496, |
|
"learning_rate": 6.17571056551295e-06, |
|
"loss": 1.6128, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.448, |
|
"grad_norm": 9.70335865020752, |
|
"learning_rate": 6.16439811246403e-06, |
|
"loss": 1.5812, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 7.478845596313477, |
|
"learning_rate": 6.153079353712201e-06, |
|
"loss": 1.653, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.452, |
|
"grad_norm": 7.009204864501953, |
|
"learning_rate": 6.141754350553279e-06, |
|
"loss": 1.432, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.454, |
|
"grad_norm": 8.384693145751953, |
|
"learning_rate": 6.130423164316893e-06, |
|
"loss": 1.6201, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.456, |
|
"grad_norm": 12.53249454498291, |
|
"learning_rate": 6.119085856366158e-06, |
|
"loss": 1.6526, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.458, |
|
"grad_norm": 7.682470321655273, |
|
"learning_rate": 6.107742488097338e-06, |
|
"loss": 1.5562, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 10.752216339111328, |
|
"learning_rate": 6.0963931209395165e-06, |
|
"loss": 1.6478, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.462, |
|
"grad_norm": 9.087852478027344, |
|
"learning_rate": 6.085037816354269e-06, |
|
"loss": 1.5929, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.464, |
|
"grad_norm": 7.526221752166748, |
|
"learning_rate": 6.073676635835317e-06, |
|
"loss": 1.588, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.466, |
|
"grad_norm": 7.148344993591309, |
|
"learning_rate": 6.062309640908206e-06, |
|
"loss": 1.575, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.468, |
|
"grad_norm": 10.243959426879883, |
|
"learning_rate": 6.05093689312997e-06, |
|
"loss": 1.5805, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 8.489969253540039, |
|
"learning_rate": 6.039558454088796e-06, |
|
"loss": 1.4469, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 7.80888557434082, |
|
"learning_rate": 6.028174385403693e-06, |
|
"loss": 1.6703, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.474, |
|
"grad_norm": 8.337100982666016, |
|
"learning_rate": 6.016784748724153e-06, |
|
"loss": 1.5589, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.476, |
|
"grad_norm": 7.308524131774902, |
|
"learning_rate": 6.005389605729824e-06, |
|
"loss": 1.5458, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.478, |
|
"grad_norm": 10.208418846130371, |
|
"learning_rate": 5.993989018130173e-06, |
|
"loss": 1.5389, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 6.792656421661377, |
|
"learning_rate": 5.982583047664151e-06, |
|
"loss": 1.574, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.482, |
|
"grad_norm": 7.216954708099365, |
|
"learning_rate": 5.97117175609986e-06, |
|
"loss": 1.6285, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.484, |
|
"grad_norm": 7.5151238441467285, |
|
"learning_rate": 5.9597552052342174e-06, |
|
"loss": 1.536, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.486, |
|
"grad_norm": 8.329843521118164, |
|
"learning_rate": 5.948333456892624e-06, |
|
"loss": 1.564, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.488, |
|
"grad_norm": 7.076045036315918, |
|
"learning_rate": 5.936906572928625e-06, |
|
"loss": 1.5722, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 8.269755363464355, |
|
"learning_rate": 5.925474615223573e-06, |
|
"loss": 1.6673, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.492, |
|
"grad_norm": 9.766873359680176, |
|
"learning_rate": 5.914037645686308e-06, |
|
"loss": 1.4451, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.494, |
|
"grad_norm": 7.705737590789795, |
|
"learning_rate": 5.902595726252801e-06, |
|
"loss": 1.7146, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.496, |
|
"grad_norm": 6.665014266967773, |
|
"learning_rate": 5.891148918885834e-06, |
|
"loss": 1.498, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.498, |
|
"grad_norm": 8.403691291809082, |
|
"learning_rate": 5.879697285574655e-06, |
|
"loss": 1.6839, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 6.893798351287842, |
|
"learning_rate": 5.8682408883346535e-06, |
|
"loss": 1.535, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.502, |
|
"grad_norm": 7.8562421798706055, |
|
"learning_rate": 5.85677978920701e-06, |
|
"loss": 1.5439, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.504, |
|
"grad_norm": 9.73377513885498, |
|
"learning_rate": 5.84531405025837e-06, |
|
"loss": 1.576, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.506, |
|
"grad_norm": 9.49201774597168, |
|
"learning_rate": 5.8338437335805124e-06, |
|
"loss": 1.7001, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.508, |
|
"grad_norm": 7.50467586517334, |
|
"learning_rate": 5.8223689012899945e-06, |
|
"loss": 1.6096, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 7.861015319824219, |
|
"learning_rate": 5.810889615527839e-06, |
|
"loss": 1.6206, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.512, |
|
"grad_norm": 9.48017692565918, |
|
"learning_rate": 5.799405938459175e-06, |
|
"loss": 1.5395, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.514, |
|
"grad_norm": 8.29472827911377, |
|
"learning_rate": 5.787917932272922e-06, |
|
"loss": 1.567, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.516, |
|
"grad_norm": 9.242623329162598, |
|
"learning_rate": 5.776425659181438e-06, |
|
"loss": 1.4826, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.518, |
|
"grad_norm": 8.55409049987793, |
|
"learning_rate": 5.764929181420191e-06, |
|
"loss": 1.462, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 8.088449478149414, |
|
"learning_rate": 5.753428561247416e-06, |
|
"loss": 1.6413, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.522, |
|
"grad_norm": 7.811099529266357, |
|
"learning_rate": 5.741923860943783e-06, |
|
"loss": 1.5257, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.524, |
|
"grad_norm": 7.643144130706787, |
|
"learning_rate": 5.730415142812059e-06, |
|
"loss": 1.638, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.526, |
|
"grad_norm": 8.378085136413574, |
|
"learning_rate": 5.718902469176765e-06, |
|
"loss": 1.6547, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.528, |
|
"grad_norm": 8.745218276977539, |
|
"learning_rate": 5.707385902383845e-06, |
|
"loss": 1.5213, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 11.519359588623047, |
|
"learning_rate": 5.695865504800328e-06, |
|
"loss": 1.6252, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.532, |
|
"grad_norm": 9.585226058959961, |
|
"learning_rate": 5.684341338813986e-06, |
|
"loss": 1.5071, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.534, |
|
"grad_norm": 7.059016704559326, |
|
"learning_rate": 5.672813466832998e-06, |
|
"loss": 1.6355, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 7.046347618103027, |
|
"learning_rate": 5.661281951285613e-06, |
|
"loss": 1.7801, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.538, |
|
"grad_norm": 6.897932052612305, |
|
"learning_rate": 5.649746854619814e-06, |
|
"loss": 1.5833, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 8.469090461730957, |
|
"learning_rate": 5.638208239302975e-06, |
|
"loss": 1.6203, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.542, |
|
"grad_norm": 10.167533874511719, |
|
"learning_rate": 5.626666167821522e-06, |
|
"loss": 1.5861, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.544, |
|
"grad_norm": 7.769595146179199, |
|
"learning_rate": 5.615120702680604e-06, |
|
"loss": 1.5514, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.546, |
|
"grad_norm": 7.826175689697266, |
|
"learning_rate": 5.6035719064037446e-06, |
|
"loss": 1.6232, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.548, |
|
"grad_norm": 7.963818550109863, |
|
"learning_rate": 5.592019841532507e-06, |
|
"loss": 1.5703, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 8.253701210021973, |
|
"learning_rate": 5.5804645706261515e-06, |
|
"loss": 1.5996, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.552, |
|
"grad_norm": 10.267999649047852, |
|
"learning_rate": 5.568906156261309e-06, |
|
"loss": 1.5394, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.554, |
|
"grad_norm": 9.601211547851562, |
|
"learning_rate": 5.557344661031628e-06, |
|
"loss": 1.511, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.556, |
|
"grad_norm": 9.681934356689453, |
|
"learning_rate": 5.54578014754744e-06, |
|
"loss": 1.5503, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.558, |
|
"grad_norm": 8.93424129486084, |
|
"learning_rate": 5.5342126784354265e-06, |
|
"loss": 1.561, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 9.094952583312988, |
|
"learning_rate": 5.522642316338268e-06, |
|
"loss": 1.5146, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.562, |
|
"grad_norm": 8.412158012390137, |
|
"learning_rate": 5.511069123914319e-06, |
|
"loss": 1.6634, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.564, |
|
"grad_norm": 9.692502975463867, |
|
"learning_rate": 5.499493163837258e-06, |
|
"loss": 1.5547, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.5659999999999998, |
|
"grad_norm": 6.936313152313232, |
|
"learning_rate": 5.487914498795748e-06, |
|
"loss": 1.5515, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.568, |
|
"grad_norm": 7.49924373626709, |
|
"learning_rate": 5.476333191493108e-06, |
|
"loss": 1.5958, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.5699999999999998, |
|
"grad_norm": 7.130555152893066, |
|
"learning_rate": 5.464749304646963e-06, |
|
"loss": 1.607, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.572, |
|
"grad_norm": 8.909390449523926, |
|
"learning_rate": 5.453162900988902e-06, |
|
"loss": 1.5895, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.5739999999999998, |
|
"grad_norm": 9.79128360748291, |
|
"learning_rate": 5.44157404326415e-06, |
|
"loss": 1.6367, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.576, |
|
"grad_norm": 8.873651504516602, |
|
"learning_rate": 5.429982794231221e-06, |
|
"loss": 1.5858, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.5779999999999998, |
|
"grad_norm": 9.033736228942871, |
|
"learning_rate": 5.41838921666158e-06, |
|
"loss": 1.6132, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 8.515612602233887, |
|
"learning_rate": 5.406793373339292e-06, |
|
"loss": 1.5178, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.5819999999999999, |
|
"grad_norm": 8.608535766601562, |
|
"learning_rate": 5.395195327060707e-06, |
|
"loss": 1.5546, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.584, |
|
"grad_norm": 9.189397811889648, |
|
"learning_rate": 5.383595140634093e-06, |
|
"loss": 1.64, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.5859999999999999, |
|
"grad_norm": 9.675806045532227, |
|
"learning_rate": 5.371992876879318e-06, |
|
"loss": 1.6044, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.588, |
|
"grad_norm": 10.594379425048828, |
|
"learning_rate": 5.360388598627487e-06, |
|
"loss": 1.6525, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.5899999999999999, |
|
"grad_norm": 7.640488624572754, |
|
"learning_rate": 5.348782368720627e-06, |
|
"loss": 1.5487, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.592, |
|
"grad_norm": 8.902362823486328, |
|
"learning_rate": 5.337174250011326e-06, |
|
"loss": 1.5257, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.5939999999999999, |
|
"grad_norm": 9.681800842285156, |
|
"learning_rate": 5.325564305362404e-06, |
|
"loss": 1.5424, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.596, |
|
"grad_norm": 7.919555187225342, |
|
"learning_rate": 5.3139525976465675e-06, |
|
"loss": 1.4885, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.5979999999999999, |
|
"grad_norm": 11.47796630859375, |
|
"learning_rate": 5.3023391897460715e-06, |
|
"loss": 1.6287, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 8.65668773651123, |
|
"learning_rate": 5.290724144552379e-06, |
|
"loss": 1.6287, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.6019999999999999, |
|
"grad_norm": 8.334498405456543, |
|
"learning_rate": 5.27910752496582e-06, |
|
"loss": 1.5546, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.604, |
|
"grad_norm": 9.96323013305664, |
|
"learning_rate": 5.267489393895247e-06, |
|
"loss": 1.658, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.6059999999999999, |
|
"grad_norm": 11.58132266998291, |
|
"learning_rate": 5.255869814257701e-06, |
|
"loss": 1.5074, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.608, |
|
"grad_norm": 10.45251178741455, |
|
"learning_rate": 5.244248848978067e-06, |
|
"loss": 1.6304, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.6099999999999999, |
|
"grad_norm": 9.885043144226074, |
|
"learning_rate": 5.232626560988735e-06, |
|
"loss": 1.6131, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.612, |
|
"grad_norm": 6.945827960968018, |
|
"learning_rate": 5.221003013229253e-06, |
|
"loss": 1.5726, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.6139999999999999, |
|
"grad_norm": 10.760123252868652, |
|
"learning_rate": 5.209378268645998e-06, |
|
"loss": 1.6255, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.616, |
|
"grad_norm": 8.588194847106934, |
|
"learning_rate": 5.197752390191827e-06, |
|
"loss": 1.6538, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.6179999999999999, |
|
"grad_norm": 7.07314920425415, |
|
"learning_rate": 5.18612544082573e-06, |
|
"loss": 1.5529, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 9.155885696411133, |
|
"learning_rate": 5.174497483512506e-06, |
|
"loss": 1.7452, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.6219999999999999, |
|
"grad_norm": 5.848752498626709, |
|
"learning_rate": 5.162868581222407e-06, |
|
"loss": 1.6038, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.624, |
|
"grad_norm": 6.948676109313965, |
|
"learning_rate": 5.151238796930804e-06, |
|
"loss": 1.5384, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.626, |
|
"grad_norm": 7.449524402618408, |
|
"learning_rate": 5.139608193617846e-06, |
|
"loss": 1.7695, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.6280000000000001, |
|
"grad_norm": 9.431687355041504, |
|
"learning_rate": 5.127976834268112e-06, |
|
"loss": 1.6164, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 11.5729341506958, |
|
"learning_rate": 5.116344781870282e-06, |
|
"loss": 1.5993, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.6320000000000001, |
|
"grad_norm": 10.78165054321289, |
|
"learning_rate": 5.1047120994167855e-06, |
|
"loss": 1.483, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.634, |
|
"grad_norm": 7.827059745788574, |
|
"learning_rate": 5.093078849903464e-06, |
|
"loss": 1.675, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.6360000000000001, |
|
"grad_norm": 8.432703971862793, |
|
"learning_rate": 5.081445096329229e-06, |
|
"loss": 1.5087, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.638, |
|
"grad_norm": 8.069623947143555, |
|
"learning_rate": 5.069810901695727e-06, |
|
"loss": 1.6049, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.6400000000000001, |
|
"grad_norm": 8.646689414978027, |
|
"learning_rate": 5.0581763290069865e-06, |
|
"loss": 1.5509, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.642, |
|
"grad_norm": 7.84726619720459, |
|
"learning_rate": 5.046541441269085e-06, |
|
"loss": 1.5971, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.6440000000000001, |
|
"grad_norm": 9.143599510192871, |
|
"learning_rate": 5.034906301489808e-06, |
|
"loss": 1.5909, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.646, |
|
"grad_norm": 7.073722839355469, |
|
"learning_rate": 5.0232709726783065e-06, |
|
"loss": 1.4608, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.6480000000000001, |
|
"grad_norm": 7.9401397705078125, |
|
"learning_rate": 5.011635517844753e-06, |
|
"loss": 1.6247, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 7.164168834686279, |
|
"learning_rate": 5e-06, |
|
"loss": 1.5878, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.6520000000000001, |
|
"grad_norm": 8.36746883392334, |
|
"learning_rate": 4.988364482155249e-06, |
|
"loss": 1.5431, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.654, |
|
"grad_norm": 7.337077617645264, |
|
"learning_rate": 4.976729027321694e-06, |
|
"loss": 1.574, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.6560000000000001, |
|
"grad_norm": 8.89696979522705, |
|
"learning_rate": 4.965093698510192e-06, |
|
"loss": 1.6283, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.658, |
|
"grad_norm": 10.577298164367676, |
|
"learning_rate": 4.953458558730917e-06, |
|
"loss": 1.6708, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.6600000000000001, |
|
"grad_norm": 9.475666046142578, |
|
"learning_rate": 4.941823670993016e-06, |
|
"loss": 1.5338, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.662, |
|
"grad_norm": 9.556079864501953, |
|
"learning_rate": 4.9301890983042744e-06, |
|
"loss": 1.5048, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 6.210651397705078, |
|
"learning_rate": 4.9185549036707715e-06, |
|
"loss": 1.5286, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.666, |
|
"grad_norm": 8.869709968566895, |
|
"learning_rate": 4.906921150096538e-06, |
|
"loss": 1.6674, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.6680000000000001, |
|
"grad_norm": 7.895263671875, |
|
"learning_rate": 4.895287900583216e-06, |
|
"loss": 1.5605, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 12.988919258117676, |
|
"learning_rate": 4.883655218129719e-06, |
|
"loss": 1.7044, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.6720000000000002, |
|
"grad_norm": 7.571807861328125, |
|
"learning_rate": 4.87202316573189e-06, |
|
"loss": 1.5871, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.674, |
|
"grad_norm": 10.251978874206543, |
|
"learning_rate": 4.860391806382157e-06, |
|
"loss": 1.4593, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.6760000000000002, |
|
"grad_norm": 7.005547046661377, |
|
"learning_rate": 4.8487612030691975e-06, |
|
"loss": 1.5433, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.678, |
|
"grad_norm": 10.263413429260254, |
|
"learning_rate": 4.837131418777595e-06, |
|
"loss": 1.617, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.6800000000000002, |
|
"grad_norm": 6.690262794494629, |
|
"learning_rate": 4.825502516487497e-06, |
|
"loss": 1.6738, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.682, |
|
"grad_norm": 9.945344924926758, |
|
"learning_rate": 4.813874559174271e-06, |
|
"loss": 1.5897, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.6840000000000002, |
|
"grad_norm": 6.344468116760254, |
|
"learning_rate": 4.802247609808175e-06, |
|
"loss": 1.5544, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.686, |
|
"grad_norm": 6.898435592651367, |
|
"learning_rate": 4.7906217313540035e-06, |
|
"loss": 1.667, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.688, |
|
"grad_norm": 7.620174407958984, |
|
"learning_rate": 4.778996986770747e-06, |
|
"loss": 1.6153, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 8.348420143127441, |
|
"learning_rate": 4.767373439011267e-06, |
|
"loss": 1.5119, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.692, |
|
"grad_norm": 6.85077428817749, |
|
"learning_rate": 4.755751151021934e-06, |
|
"loss": 1.6303, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.694, |
|
"grad_norm": 6.931562900543213, |
|
"learning_rate": 4.744130185742301e-06, |
|
"loss": 1.535, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.696, |
|
"grad_norm": 7.309017658233643, |
|
"learning_rate": 4.732510606104754e-06, |
|
"loss": 1.6259, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.698, |
|
"grad_norm": 8.722718238830566, |
|
"learning_rate": 4.720892475034181e-06, |
|
"loss": 1.5336, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 9.0813627243042, |
|
"learning_rate": 4.7092758554476215e-06, |
|
"loss": 1.5323, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.702, |
|
"grad_norm": 8.41715145111084, |
|
"learning_rate": 4.6976608102539285e-06, |
|
"loss": 1.5765, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.704, |
|
"grad_norm": 9.829058647155762, |
|
"learning_rate": 4.686047402353433e-06, |
|
"loss": 1.4992, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.706, |
|
"grad_norm": 8.533679962158203, |
|
"learning_rate": 4.674435694637597e-06, |
|
"loss": 1.6102, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.708, |
|
"grad_norm": 8.357962608337402, |
|
"learning_rate": 4.662825749988675e-06, |
|
"loss": 1.5547, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 6.73824405670166, |
|
"learning_rate": 4.651217631279374e-06, |
|
"loss": 1.5051, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.712, |
|
"grad_norm": 13.358663558959961, |
|
"learning_rate": 4.639611401372514e-06, |
|
"loss": 1.6093, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.714, |
|
"grad_norm": 9.384882926940918, |
|
"learning_rate": 4.6280071231206845e-06, |
|
"loss": 1.5455, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.716, |
|
"grad_norm": 10.82388687133789, |
|
"learning_rate": 4.6164048593659076e-06, |
|
"loss": 1.5374, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.718, |
|
"grad_norm": 6.8409743309021, |
|
"learning_rate": 4.604804672939295e-06, |
|
"loss": 1.6312, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 7.881139755249023, |
|
"learning_rate": 4.59320662666071e-06, |
|
"loss": 1.5693, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.722, |
|
"grad_norm": 9.830620765686035, |
|
"learning_rate": 4.581610783338424e-06, |
|
"loss": 1.7035, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.724, |
|
"grad_norm": 7.074033737182617, |
|
"learning_rate": 4.570017205768779e-06, |
|
"loss": 1.5734, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.726, |
|
"grad_norm": 10.345975875854492, |
|
"learning_rate": 4.5584259567358505e-06, |
|
"loss": 1.474, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 8.930947303771973, |
|
"learning_rate": 4.546837099011101e-06, |
|
"loss": 1.5575, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 8.436446189880371, |
|
"learning_rate": 4.53525069535304e-06, |
|
"loss": 1.6057, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.732, |
|
"grad_norm": 8.274133682250977, |
|
"learning_rate": 4.523666808506893e-06, |
|
"loss": 1.6484, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.734, |
|
"grad_norm": 8.548066139221191, |
|
"learning_rate": 4.512085501204254e-06, |
|
"loss": 1.5368, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.736, |
|
"grad_norm": 8.803610801696777, |
|
"learning_rate": 4.500506836162746e-06, |
|
"loss": 1.6525, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.738, |
|
"grad_norm": 8.110183715820312, |
|
"learning_rate": 4.4889308760856826e-06, |
|
"loss": 1.5937, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 11.329172134399414, |
|
"learning_rate": 4.477357683661734e-06, |
|
"loss": 1.6, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.742, |
|
"grad_norm": 7.598846435546875, |
|
"learning_rate": 4.465787321564576e-06, |
|
"loss": 1.546, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.744, |
|
"grad_norm": 6.989120960235596, |
|
"learning_rate": 4.45421985245256e-06, |
|
"loss": 1.6186, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.746, |
|
"grad_norm": 9.312042236328125, |
|
"learning_rate": 4.442655338968373e-06, |
|
"loss": 1.6592, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.748, |
|
"grad_norm": 5.902655124664307, |
|
"learning_rate": 4.431093843738693e-06, |
|
"loss": 1.5185, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 6.853744029998779, |
|
"learning_rate": 4.4195354293738484e-06, |
|
"loss": 1.5383, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.752, |
|
"grad_norm": 8.076836585998535, |
|
"learning_rate": 4.4079801584674955e-06, |
|
"loss": 1.4652, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.754, |
|
"grad_norm": 6.988975524902344, |
|
"learning_rate": 4.396428093596258e-06, |
|
"loss": 1.5636, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.756, |
|
"grad_norm": 7.838133335113525, |
|
"learning_rate": 4.384879297319398e-06, |
|
"loss": 1.5891, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.758, |
|
"grad_norm": 6.956199645996094, |
|
"learning_rate": 4.373333832178478e-06, |
|
"loss": 1.5573, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 7.702134132385254, |
|
"learning_rate": 4.361791760697027e-06, |
|
"loss": 1.6047, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.762, |
|
"grad_norm": 8.466774940490723, |
|
"learning_rate": 4.3502531453801885e-06, |
|
"loss": 1.6113, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.764, |
|
"grad_norm": 7.680281639099121, |
|
"learning_rate": 4.3387180487143875e-06, |
|
"loss": 1.5202, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.766, |
|
"grad_norm": 8.012104988098145, |
|
"learning_rate": 4.3271865331670036e-06, |
|
"loss": 1.6169, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.768, |
|
"grad_norm": 9.084745407104492, |
|
"learning_rate": 4.315658661186016e-06, |
|
"loss": 1.4818, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 8.11100959777832, |
|
"learning_rate": 4.304134495199675e-06, |
|
"loss": 1.6068, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.772, |
|
"grad_norm": 7.566526412963867, |
|
"learning_rate": 4.2926140976161555e-06, |
|
"loss": 1.5804, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.774, |
|
"grad_norm": 6.527312755584717, |
|
"learning_rate": 4.281097530823237e-06, |
|
"loss": 1.5348, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.776, |
|
"grad_norm": 8.60621166229248, |
|
"learning_rate": 4.269584857187942e-06, |
|
"loss": 1.6018, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.778, |
|
"grad_norm": 6.590750217437744, |
|
"learning_rate": 4.258076139056217e-06, |
|
"loss": 1.5499, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 8.905508995056152, |
|
"learning_rate": 4.246571438752585e-06, |
|
"loss": 1.7064, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.782, |
|
"grad_norm": 8.529908180236816, |
|
"learning_rate": 4.23507081857981e-06, |
|
"loss": 1.534, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.784, |
|
"grad_norm": 9.130575180053711, |
|
"learning_rate": 4.2235743408185635e-06, |
|
"loss": 1.6427, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.786, |
|
"grad_norm": 8.667200088500977, |
|
"learning_rate": 4.212082067727079e-06, |
|
"loss": 1.4933, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.788, |
|
"grad_norm": 8.957341194152832, |
|
"learning_rate": 4.200594061540827e-06, |
|
"loss": 1.591, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 7.048657417297363, |
|
"learning_rate": 4.189110384472164e-06, |
|
"loss": 1.5583, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 8.028057098388672, |
|
"learning_rate": 4.1776310987100054e-06, |
|
"loss": 1.4866, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.794, |
|
"grad_norm": 8.738648414611816, |
|
"learning_rate": 4.166156266419489e-06, |
|
"loss": 1.6125, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.796, |
|
"grad_norm": 8.001562118530273, |
|
"learning_rate": 4.154685949741631e-06, |
|
"loss": 1.4699, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.798, |
|
"grad_norm": 8.324260711669922, |
|
"learning_rate": 4.143220210792993e-06, |
|
"loss": 1.5949, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 11.599272727966309, |
|
"learning_rate": 4.131759111665349e-06, |
|
"loss": 1.6686, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.802, |
|
"grad_norm": 7.560389518737793, |
|
"learning_rate": 4.1203027144253466e-06, |
|
"loss": 1.5041, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.804, |
|
"grad_norm": 7.603427886962891, |
|
"learning_rate": 4.108851081114169e-06, |
|
"loss": 1.4909, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.806, |
|
"grad_norm": 6.763309955596924, |
|
"learning_rate": 4.0974042737472005e-06, |
|
"loss": 1.5467, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.808, |
|
"grad_norm": 9.465215682983398, |
|
"learning_rate": 4.0859623543136935e-06, |
|
"loss": 1.5266, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 7.232966423034668, |
|
"learning_rate": 4.074525384776428e-06, |
|
"loss": 1.5645, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.812, |
|
"grad_norm": 7.509275436401367, |
|
"learning_rate": 4.063093427071376e-06, |
|
"loss": 1.5804, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.814, |
|
"grad_norm": 8.851505279541016, |
|
"learning_rate": 4.051666543107377e-06, |
|
"loss": 1.5903, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.8159999999999998, |
|
"grad_norm": 7.193111419677734, |
|
"learning_rate": 4.040244794765783e-06, |
|
"loss": 1.6361, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.818, |
|
"grad_norm": 13.53274154663086, |
|
"learning_rate": 4.028828243900141e-06, |
|
"loss": 1.5629, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.8199999999999998, |
|
"grad_norm": 8.000975608825684, |
|
"learning_rate": 4.017416952335849e-06, |
|
"loss": 1.55, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.822, |
|
"grad_norm": 9.145268440246582, |
|
"learning_rate": 4.006010981869829e-06, |
|
"loss": 1.5949, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.8239999999999998, |
|
"grad_norm": 9.474929809570312, |
|
"learning_rate": 3.994610394270178e-06, |
|
"loss": 1.5537, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.826, |
|
"grad_norm": 8.653277397155762, |
|
"learning_rate": 3.983215251275847e-06, |
|
"loss": 1.6259, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.8279999999999998, |
|
"grad_norm": 7.7228617668151855, |
|
"learning_rate": 3.971825614596308e-06, |
|
"loss": 1.5946, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 9.129724502563477, |
|
"learning_rate": 3.960441545911205e-06, |
|
"loss": 1.5925, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.8319999999999999, |
|
"grad_norm": 9.415990829467773, |
|
"learning_rate": 3.949063106870031e-06, |
|
"loss": 1.5227, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.834, |
|
"grad_norm": 9.796669006347656, |
|
"learning_rate": 3.9376903590917945e-06, |
|
"loss": 1.5435, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.8359999999999999, |
|
"grad_norm": 9.218023300170898, |
|
"learning_rate": 3.926323364164684e-06, |
|
"loss": 1.4847, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.838, |
|
"grad_norm": 6.569913387298584, |
|
"learning_rate": 3.914962183645733e-06, |
|
"loss": 1.598, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.8399999999999999, |
|
"grad_norm": 8.879981994628906, |
|
"learning_rate": 3.903606879060483e-06, |
|
"loss": 1.625, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.842, |
|
"grad_norm": 6.769296169281006, |
|
"learning_rate": 3.892257511902664e-06, |
|
"loss": 1.5214, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.8439999999999999, |
|
"grad_norm": 7.948380470275879, |
|
"learning_rate": 3.880914143633844e-06, |
|
"loss": 1.5543, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.846, |
|
"grad_norm": 6.776271820068359, |
|
"learning_rate": 3.869576835683109e-06, |
|
"loss": 1.5748, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.8479999999999999, |
|
"grad_norm": 8.43204402923584, |
|
"learning_rate": 3.8582456494467214e-06, |
|
"loss": 1.5795, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 9.641144752502441, |
|
"learning_rate": 3.8469206462878e-06, |
|
"loss": 1.5457, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.8519999999999999, |
|
"grad_norm": 9.725312232971191, |
|
"learning_rate": 3.835601887535971e-06, |
|
"loss": 1.5125, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.854, |
|
"grad_norm": 10.626245498657227, |
|
"learning_rate": 3.82428943448705e-06, |
|
"loss": 1.6646, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 8.524333000183105, |
|
"learning_rate": 3.812983348402703e-06, |
|
"loss": 1.6704, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.858, |
|
"grad_norm": 8.207454681396484, |
|
"learning_rate": 3.8016836905101157e-06, |
|
"loss": 1.6347, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.8599999999999999, |
|
"grad_norm": 9.849774360656738, |
|
"learning_rate": 3.790390522001662e-06, |
|
"loss": 1.5782, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.862, |
|
"grad_norm": 8.06809139251709, |
|
"learning_rate": 3.7791039040345743e-06, |
|
"loss": 1.6321, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.8639999999999999, |
|
"grad_norm": 8.71696949005127, |
|
"learning_rate": 3.767823897730612e-06, |
|
"loss": 1.66, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.866, |
|
"grad_norm": 7.418612480163574, |
|
"learning_rate": 3.756550564175727e-06, |
|
"loss": 1.5837, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.8679999999999999, |
|
"grad_norm": 7.238306999206543, |
|
"learning_rate": 3.745283964419736e-06, |
|
"loss": 1.5742, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 7.4198899269104, |
|
"learning_rate": 3.7340241594759917e-06, |
|
"loss": 1.7463, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.8719999999999999, |
|
"grad_norm": 6.619482517242432, |
|
"learning_rate": 3.7227712103210485e-06, |
|
"loss": 1.738, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.874, |
|
"grad_norm": 7.6474223136901855, |
|
"learning_rate": 3.7115251778943314e-06, |
|
"loss": 1.5902, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.876, |
|
"grad_norm": 9.21036434173584, |
|
"learning_rate": 3.700286123097814e-06, |
|
"loss": 1.5674, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.8780000000000001, |
|
"grad_norm": 7.860700607299805, |
|
"learning_rate": 3.6890541067956775e-06, |
|
"loss": 1.6321, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 7.944121837615967, |
|
"learning_rate": 3.6778291898139907e-06, |
|
"loss": 1.6441, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.8820000000000001, |
|
"grad_norm": 9.204889297485352, |
|
"learning_rate": 3.6666114329403723e-06, |
|
"loss": 1.5388, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.884, |
|
"grad_norm": 6.569108009338379, |
|
"learning_rate": 3.655400896923672e-06, |
|
"loss": 1.5072, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.8860000000000001, |
|
"grad_norm": 6.905144691467285, |
|
"learning_rate": 3.6441976424736315e-06, |
|
"loss": 1.591, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.888, |
|
"grad_norm": 9.775949478149414, |
|
"learning_rate": 3.633001730260558e-06, |
|
"loss": 1.5154, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.8900000000000001, |
|
"grad_norm": 8.357000350952148, |
|
"learning_rate": 3.6218132209150047e-06, |
|
"loss": 1.7292, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.892, |
|
"grad_norm": 10.853804588317871, |
|
"learning_rate": 3.6106321750274275e-06, |
|
"loss": 1.6071, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.8940000000000001, |
|
"grad_norm": 7.702592849731445, |
|
"learning_rate": 3.5994586531478672e-06, |
|
"loss": 1.5247, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.896, |
|
"grad_norm": 8.18514633178711, |
|
"learning_rate": 3.5882927157856175e-06, |
|
"loss": 1.5674, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.8980000000000001, |
|
"grad_norm": 6.387351989746094, |
|
"learning_rate": 3.577134423408906e-06, |
|
"loss": 1.5946, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 7.652561187744141, |
|
"learning_rate": 3.5659838364445505e-06, |
|
"loss": 1.5211, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.9020000000000001, |
|
"grad_norm": 6.697483062744141, |
|
"learning_rate": 3.5548410152776414e-06, |
|
"loss": 1.6415, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.904, |
|
"grad_norm": 7.114959716796875, |
|
"learning_rate": 3.543706020251223e-06, |
|
"loss": 1.5515, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.9060000000000001, |
|
"grad_norm": 7.068447113037109, |
|
"learning_rate": 3.5325789116659493e-06, |
|
"loss": 1.6781, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.908, |
|
"grad_norm": 6.634418487548828, |
|
"learning_rate": 3.521459749779769e-06, |
|
"loss": 1.5848, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.9100000000000001, |
|
"grad_norm": 7.493570327758789, |
|
"learning_rate": 3.51034859480759e-06, |
|
"loss": 1.6289, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.912, |
|
"grad_norm": 9.925775527954102, |
|
"learning_rate": 3.4992455069209717e-06, |
|
"loss": 1.5641, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.9140000000000001, |
|
"grad_norm": 10.061575889587402, |
|
"learning_rate": 3.488150546247778e-06, |
|
"loss": 1.5989, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.916, |
|
"grad_norm": 8.778186798095703, |
|
"learning_rate": 3.4770637728718608e-06, |
|
"loss": 1.7018, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.9180000000000001, |
|
"grad_norm": 8.304412841796875, |
|
"learning_rate": 3.465985246832739e-06, |
|
"loss": 1.6029, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 7.178003311157227, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 1.5976, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.9220000000000002, |
|
"grad_norm": 9.42418384552002, |
|
"learning_rate": 3.4438531766993012e-06, |
|
"loss": 1.5237, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.924, |
|
"grad_norm": 8.352529525756836, |
|
"learning_rate": 3.4327997524594026e-06, |
|
"loss": 1.6348, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.9260000000000002, |
|
"grad_norm": 10.59170913696289, |
|
"learning_rate": 3.4217548152644887e-06, |
|
"loss": 1.5547, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.928, |
|
"grad_norm": 7.5832343101501465, |
|
"learning_rate": 3.4107184249275114e-06, |
|
"loss": 1.5294, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.9300000000000002, |
|
"grad_norm": 9.453595161437988, |
|
"learning_rate": 3.399690641215142e-06, |
|
"loss": 1.6929, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.932, |
|
"grad_norm": 8.6815185546875, |
|
"learning_rate": 3.3886715238474454e-06, |
|
"loss": 1.6084, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.9340000000000002, |
|
"grad_norm": 6.6434149742126465, |
|
"learning_rate": 3.3776611324975496e-06, |
|
"loss": 1.4945, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.936, |
|
"grad_norm": 9.94503402709961, |
|
"learning_rate": 3.3666595267913293e-06, |
|
"loss": 1.587, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.938, |
|
"grad_norm": 7.814234256744385, |
|
"learning_rate": 3.355666766307084e-06, |
|
"loss": 1.6181, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 8.182234764099121, |
|
"learning_rate": 3.3446829105752103e-06, |
|
"loss": 1.5493, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.942, |
|
"grad_norm": 7.502097129821777, |
|
"learning_rate": 3.3337080190778816e-06, |
|
"loss": 1.4921, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.944, |
|
"grad_norm": 6.4139604568481445, |
|
"learning_rate": 3.322742151248726e-06, |
|
"loss": 1.5533, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.946, |
|
"grad_norm": 7.537562370300293, |
|
"learning_rate": 3.311785366472506e-06, |
|
"loss": 1.5851, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.948, |
|
"grad_norm": 8.741056442260742, |
|
"learning_rate": 3.3008377240847955e-06, |
|
"loss": 1.5118, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 7.846621513366699, |
|
"learning_rate": 3.289899283371657e-06, |
|
"loss": 1.5772, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.952, |
|
"grad_norm": 9.379910469055176, |
|
"learning_rate": 3.2789701035693242e-06, |
|
"loss": 1.5339, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.954, |
|
"grad_norm": 9.359630584716797, |
|
"learning_rate": 3.268050243863877e-06, |
|
"loss": 1.6382, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.956, |
|
"grad_norm": 11.636152267456055, |
|
"learning_rate": 3.2571397633909252e-06, |
|
"loss": 1.5506, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.958, |
|
"grad_norm": 8.888190269470215, |
|
"learning_rate": 3.246238721235283e-06, |
|
"loss": 1.4885, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 8.042271614074707, |
|
"learning_rate": 3.2353471764306567e-06, |
|
"loss": 1.5741, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.962, |
|
"grad_norm": 7.534533500671387, |
|
"learning_rate": 3.224465187959316e-06, |
|
"loss": 1.5664, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.964, |
|
"grad_norm": 6.7709479331970215, |
|
"learning_rate": 3.2135928147517803e-06, |
|
"loss": 1.5943, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.966, |
|
"grad_norm": 8.381340026855469, |
|
"learning_rate": 3.2027301156865015e-06, |
|
"loss": 1.6603, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.968, |
|
"grad_norm": 9.999662399291992, |
|
"learning_rate": 3.1918771495895395e-06, |
|
"loss": 1.7147, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 11.495597839355469, |
|
"learning_rate": 3.1810339752342446e-06, |
|
"loss": 1.6706, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.972, |
|
"grad_norm": 9.23818588256836, |
|
"learning_rate": 3.1702006513409393e-06, |
|
"loss": 1.6012, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.974, |
|
"grad_norm": 10.354619026184082, |
|
"learning_rate": 3.1593772365766107e-06, |
|
"loss": 1.6126, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.976, |
|
"grad_norm": 9.37294864654541, |
|
"learning_rate": 3.148563789554575e-06, |
|
"loss": 1.6156, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.978, |
|
"grad_norm": 10.598193168640137, |
|
"learning_rate": 3.137760368834169e-06, |
|
"loss": 1.7388, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 9.898024559020996, |
|
"learning_rate": 3.12696703292044e-06, |
|
"loss": 1.6421, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.982, |
|
"grad_norm": 9.265511512756348, |
|
"learning_rate": 3.1161838402638158e-06, |
|
"loss": 1.5518, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 7.578424453735352, |
|
"learning_rate": 3.105410849259796e-06, |
|
"loss": 1.5866, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.986, |
|
"grad_norm": 10.20102310180664, |
|
"learning_rate": 3.09464811824863e-06, |
|
"loss": 1.6485, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.988, |
|
"grad_norm": 7.367624759674072, |
|
"learning_rate": 3.0838957055150136e-06, |
|
"loss": 1.6409, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 7.860062599182129, |
|
"learning_rate": 3.0731536692877596e-06, |
|
"loss": 1.535, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.992, |
|
"grad_norm": 10.079544067382812, |
|
"learning_rate": 3.0624220677394854e-06, |
|
"loss": 1.5819, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.994, |
|
"grad_norm": 6.722997665405273, |
|
"learning_rate": 3.0517009589863057e-06, |
|
"loss": 1.6481, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.996, |
|
"grad_norm": 11.341468811035156, |
|
"learning_rate": 3.040990401087508e-06, |
|
"loss": 1.695, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.998, |
|
"grad_norm": 6.343021392822266, |
|
"learning_rate": 3.030290452045245e-06, |
|
"loss": 1.6707, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 11.605053901672363, |
|
"learning_rate": 3.019601169804216e-06, |
|
"loss": 1.5376, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.002, |
|
"grad_norm": 9.129085540771484, |
|
"learning_rate": 3.0089226122513583e-06, |
|
"loss": 1.3932, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.004, |
|
"grad_norm": 7.879745006561279, |
|
"learning_rate": 2.9982548372155264e-06, |
|
"loss": 1.4429, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.006, |
|
"grad_norm": 7.70796537399292, |
|
"learning_rate": 2.9875979024671846e-06, |
|
"loss": 1.4238, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.008, |
|
"grad_norm": 8.425646781921387, |
|
"learning_rate": 2.9769518657180953e-06, |
|
"loss": 1.3642, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 7.608778953552246, |
|
"learning_rate": 2.966316784621e-06, |
|
"loss": 1.3483, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.012, |
|
"grad_norm": 9.824869155883789, |
|
"learning_rate": 2.9556927167693107e-06, |
|
"loss": 1.4165, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.014, |
|
"grad_norm": 7.270105838775635, |
|
"learning_rate": 2.945079719696802e-06, |
|
"loss": 1.3235, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.016, |
|
"grad_norm": 10.12110424041748, |
|
"learning_rate": 2.934477850877292e-06, |
|
"loss": 1.3102, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.018, |
|
"grad_norm": 8.959726333618164, |
|
"learning_rate": 2.9238871677243354e-06, |
|
"loss": 1.2703, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 8.507242202758789, |
|
"learning_rate": 2.9133077275909112e-06, |
|
"loss": 1.1562, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.022, |
|
"grad_norm": 11.486827850341797, |
|
"learning_rate": 2.9027395877691143e-06, |
|
"loss": 1.2312, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.024, |
|
"grad_norm": 9.935345649719238, |
|
"learning_rate": 2.892182805489846e-06, |
|
"loss": 1.1142, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.026, |
|
"grad_norm": 9.927567481994629, |
|
"learning_rate": 2.8816374379224932e-06, |
|
"loss": 1.0935, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.028, |
|
"grad_norm": 11.30349349975586, |
|
"learning_rate": 2.871103542174637e-06, |
|
"loss": 1.0115, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 11.31701946258545, |
|
"learning_rate": 2.86058117529173e-06, |
|
"loss": 1.0013, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.032, |
|
"grad_norm": 11.658658027648926, |
|
"learning_rate": 2.8500703942567874e-06, |
|
"loss": 1.0056, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.034, |
|
"grad_norm": 15.923133850097656, |
|
"learning_rate": 2.839571255990088e-06, |
|
"loss": 1.1577, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.036, |
|
"grad_norm": 16.112945556640625, |
|
"learning_rate": 2.82908381734886e-06, |
|
"loss": 1.0646, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.038, |
|
"grad_norm": 16.52297019958496, |
|
"learning_rate": 2.818608135126967e-06, |
|
"loss": 1.1384, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 17.75345802307129, |
|
"learning_rate": 2.8081442660546126e-06, |
|
"loss": 1.0859, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.042, |
|
"grad_norm": 18.585851669311523, |
|
"learning_rate": 2.797692266798027e-06, |
|
"loss": 1.0863, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.044, |
|
"grad_norm": 17.183208465576172, |
|
"learning_rate": 2.7872521939591556e-06, |
|
"loss": 1.0416, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.046, |
|
"grad_norm": 18.7218074798584, |
|
"learning_rate": 2.776824104075364e-06, |
|
"loss": 0.8856, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 20.450927734375, |
|
"learning_rate": 2.7664080536191178e-06, |
|
"loss": 1.1884, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 17.043212890625, |
|
"learning_rate": 2.7560040989976894e-06, |
|
"loss": 1.0495, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.052, |
|
"grad_norm": 15.958205223083496, |
|
"learning_rate": 2.7456122965528475e-06, |
|
"loss": 0.9814, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.054, |
|
"grad_norm": 15.604181289672852, |
|
"learning_rate": 2.7352327025605464e-06, |
|
"loss": 0.9974, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.056, |
|
"grad_norm": 14.632473945617676, |
|
"learning_rate": 2.724865373230632e-06, |
|
"loss": 1.002, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.058, |
|
"grad_norm": 14.43281364440918, |
|
"learning_rate": 2.714510364706531e-06, |
|
"loss": 0.9437, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 17.825098037719727, |
|
"learning_rate": 2.7041677330649408e-06, |
|
"loss": 1.0608, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.062, |
|
"grad_norm": 15.265949249267578, |
|
"learning_rate": 2.6938375343155464e-06, |
|
"loss": 1.1083, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.064, |
|
"grad_norm": 13.251029014587402, |
|
"learning_rate": 2.683519824400693e-06, |
|
"loss": 1.0742, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.066, |
|
"grad_norm": 13.636655807495117, |
|
"learning_rate": 2.6732146591950924e-06, |
|
"loss": 1.0812, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.068, |
|
"grad_norm": 14.354068756103516, |
|
"learning_rate": 2.662922094505529e-06, |
|
"loss": 1.1726, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 13.713906288146973, |
|
"learning_rate": 2.6526421860705474e-06, |
|
"loss": 1.1794, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.072, |
|
"grad_norm": 15.267590522766113, |
|
"learning_rate": 2.6423749895601494e-06, |
|
"loss": 1.1732, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.074, |
|
"grad_norm": 13.415190696716309, |
|
"learning_rate": 2.6321205605755002e-06, |
|
"loss": 1.1165, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.076, |
|
"grad_norm": 12.924043655395508, |
|
"learning_rate": 2.6218789546486235e-06, |
|
"loss": 1.1036, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.078, |
|
"grad_norm": 11.99133014678955, |
|
"learning_rate": 2.611650227242102e-06, |
|
"loss": 1.0163, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 11.737472534179688, |
|
"learning_rate": 2.601434433748771e-06, |
|
"loss": 0.9887, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.082, |
|
"grad_norm": 14.03266429901123, |
|
"learning_rate": 2.5912316294914232e-06, |
|
"loss": 1.0226, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.084, |
|
"grad_norm": 13.012822151184082, |
|
"learning_rate": 2.581041869722519e-06, |
|
"loss": 1.1165, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.086, |
|
"grad_norm": 13.519615173339844, |
|
"learning_rate": 2.5708652096238674e-06, |
|
"loss": 1.1066, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.088, |
|
"grad_norm": 11.919333457946777, |
|
"learning_rate": 2.560701704306336e-06, |
|
"loss": 0.9055, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 11.374495506286621, |
|
"learning_rate": 2.550551408809566e-06, |
|
"loss": 1.024, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.092, |
|
"grad_norm": 12.566267013549805, |
|
"learning_rate": 2.540414378101647e-06, |
|
"loss": 1.1746, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.094, |
|
"grad_norm": 13.454764366149902, |
|
"learning_rate": 2.5302906670788463e-06, |
|
"loss": 0.9756, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.096, |
|
"grad_norm": 12.35405445098877, |
|
"learning_rate": 2.52018033056529e-06, |
|
"loss": 1.0644, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.098, |
|
"grad_norm": 12.588571548461914, |
|
"learning_rate": 2.5100834233126827e-06, |
|
"loss": 1.1582, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 13.59836196899414, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 1.0943, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.102, |
|
"grad_norm": 12.1016206741333, |
|
"learning_rate": 2.489930115233199e-06, |
|
"loss": 1.0855, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.104, |
|
"grad_norm": 13.028971672058105, |
|
"learning_rate": 2.4798738235449164e-06, |
|
"loss": 1.1361, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.106, |
|
"grad_norm": 13.81318187713623, |
|
"learning_rate": 2.469831179394182e-06, |
|
"loss": 1.0715, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.108, |
|
"grad_norm": 12.744100570678711, |
|
"learning_rate": 2.4598022371661113e-06, |
|
"loss": 1.1322, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 13.312298774719238, |
|
"learning_rate": 2.4497870511716237e-06, |
|
"loss": 1.0601, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 12.346517562866211, |
|
"learning_rate": 2.4397856756471435e-06, |
|
"loss": 1.0386, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.114, |
|
"grad_norm": 11.398303985595703, |
|
"learning_rate": 2.429798164754299e-06, |
|
"loss": 1.1074, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.116, |
|
"grad_norm": 14.021239280700684, |
|
"learning_rate": 2.4198245725796427e-06, |
|
"loss": 1.1536, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.118, |
|
"grad_norm": 13.355626106262207, |
|
"learning_rate": 2.40986495313435e-06, |
|
"loss": 1.1139, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 11.496376037597656, |
|
"learning_rate": 2.3999193603539234e-06, |
|
"loss": 1.1207, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.122, |
|
"grad_norm": 12.229351043701172, |
|
"learning_rate": 2.3899878480979098e-06, |
|
"loss": 1.0847, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.124, |
|
"grad_norm": 12.624899864196777, |
|
"learning_rate": 2.380070470149605e-06, |
|
"loss": 1.1526, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.126, |
|
"grad_norm": 12.152226448059082, |
|
"learning_rate": 2.3701672802157567e-06, |
|
"loss": 1.0407, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.128, |
|
"grad_norm": 12.016361236572266, |
|
"learning_rate": 2.3602783319262847e-06, |
|
"loss": 1.0879, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 11.808967590332031, |
|
"learning_rate": 2.3504036788339763e-06, |
|
"loss": 1.1118, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.132, |
|
"grad_norm": 11.515680313110352, |
|
"learning_rate": 2.340543374414212e-06, |
|
"loss": 1.0536, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.134, |
|
"grad_norm": 10.819012641906738, |
|
"learning_rate": 2.330697472064667e-06, |
|
"loss": 1.0542, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.136, |
|
"grad_norm": 12.405117988586426, |
|
"learning_rate": 2.320866025105016e-06, |
|
"loss": 1.0506, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.138, |
|
"grad_norm": 11.67018985748291, |
|
"learning_rate": 2.3110490867766644e-06, |
|
"loss": 1.101, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 12.757566452026367, |
|
"learning_rate": 2.3012467102424373e-06, |
|
"loss": 1.093, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.142, |
|
"grad_norm": 12.47193717956543, |
|
"learning_rate": 2.2914589485863015e-06, |
|
"loss": 1.1508, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.144, |
|
"grad_norm": 11.597710609436035, |
|
"learning_rate": 2.2816858548130837e-06, |
|
"loss": 1.1538, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.146, |
|
"grad_norm": 12.810328483581543, |
|
"learning_rate": 2.2719274818481767e-06, |
|
"loss": 1.0238, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.148, |
|
"grad_norm": 13.841683387756348, |
|
"learning_rate": 2.2621838825372496e-06, |
|
"loss": 1.202, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 12.471292495727539, |
|
"learning_rate": 2.2524551096459703e-06, |
|
"loss": 1.0969, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.152, |
|
"grad_norm": 11.695208549499512, |
|
"learning_rate": 2.2427412158597133e-06, |
|
"loss": 1.0346, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.154, |
|
"grad_norm": 12.01142406463623, |
|
"learning_rate": 2.23304225378328e-06, |
|
"loss": 1.091, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.156, |
|
"grad_norm": 14.317254066467285, |
|
"learning_rate": 2.2233582759406065e-06, |
|
"loss": 1.1849, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.158, |
|
"grad_norm": 11.322208404541016, |
|
"learning_rate": 2.213689334774479e-06, |
|
"loss": 1.0879, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 14.340149879455566, |
|
"learning_rate": 2.204035482646267e-06, |
|
"loss": 1.1532, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.162, |
|
"grad_norm": 12.387587547302246, |
|
"learning_rate": 2.1943967718356123e-06, |
|
"loss": 1.0726, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.164, |
|
"grad_norm": 12.853300094604492, |
|
"learning_rate": 2.184773254540169e-06, |
|
"loss": 1.1011, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.166, |
|
"grad_norm": 12.014087677001953, |
|
"learning_rate": 2.175164982875311e-06, |
|
"loss": 1.1168, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.168, |
|
"grad_norm": 13.224556922912598, |
|
"learning_rate": 2.165572008873845e-06, |
|
"loss": 1.1332, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 10.886467933654785, |
|
"learning_rate": 2.155994384485742e-06, |
|
"loss": 0.9585, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.172, |
|
"grad_norm": 13.911365509033203, |
|
"learning_rate": 2.146432161577842e-06, |
|
"loss": 1.1193, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.174, |
|
"grad_norm": 11.698256492614746, |
|
"learning_rate": 2.1368853919335835e-06, |
|
"loss": 1.1248, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 11.102900505065918, |
|
"learning_rate": 2.12735412725272e-06, |
|
"loss": 1.1625, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.178, |
|
"grad_norm": 11.995986938476562, |
|
"learning_rate": 2.1178384191510344e-06, |
|
"loss": 1.1379, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 10.211990356445312, |
|
"learning_rate": 2.1083383191600676e-06, |
|
"loss": 1.0004, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.182, |
|
"grad_norm": 11.571313858032227, |
|
"learning_rate": 2.0988538787268374e-06, |
|
"loss": 1.1064, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.184, |
|
"grad_norm": 12.789441108703613, |
|
"learning_rate": 2.0893851492135536e-06, |
|
"loss": 1.1492, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.186, |
|
"grad_norm": 11.842870712280273, |
|
"learning_rate": 2.0799321818973488e-06, |
|
"loss": 1.1277, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.188, |
|
"grad_norm": 12.860237121582031, |
|
"learning_rate": 2.0704950279699986e-06, |
|
"loss": 1.112, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 12.648483276367188, |
|
"learning_rate": 2.061073738537635e-06, |
|
"loss": 0.9933, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.192, |
|
"grad_norm": 12.022543907165527, |
|
"learning_rate": 2.0516683646204836e-06, |
|
"loss": 1.0655, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.194, |
|
"grad_norm": 12.181710243225098, |
|
"learning_rate": 2.0422789571525813e-06, |
|
"loss": 1.1167, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.196, |
|
"grad_norm": 12.39856243133545, |
|
"learning_rate": 2.0329055669814936e-06, |
|
"loss": 1.1543, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.198, |
|
"grad_norm": 10.813960075378418, |
|
"learning_rate": 2.023548244868051e-06, |
|
"loss": 1.031, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 9.889363288879395, |
|
"learning_rate": 2.0142070414860704e-06, |
|
"loss": 1.0438, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.202, |
|
"grad_norm": 11.947040557861328, |
|
"learning_rate": 2.0048820074220716e-06, |
|
"loss": 1.0579, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.204, |
|
"grad_norm": 10.942092895507812, |
|
"learning_rate": 1.9955731931750182e-06, |
|
"loss": 1.1079, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.206, |
|
"grad_norm": 10.924947738647461, |
|
"learning_rate": 1.9862806491560315e-06, |
|
"loss": 1.1075, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.208, |
|
"grad_norm": 11.159443855285645, |
|
"learning_rate": 1.977004425688126e-06, |
|
"loss": 1.0548, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 11.316449165344238, |
|
"learning_rate": 1.9677445730059348e-06, |
|
"loss": 1.1579, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.212, |
|
"grad_norm": 12.146157264709473, |
|
"learning_rate": 1.958501141255427e-06, |
|
"loss": 1.136, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.214, |
|
"grad_norm": 10.939238548278809, |
|
"learning_rate": 1.9492741804936623e-06, |
|
"loss": 1.0623, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.216, |
|
"grad_norm": 12.354981422424316, |
|
"learning_rate": 1.9400637406884875e-06, |
|
"loss": 1.1298, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.218, |
|
"grad_norm": 10.884641647338867, |
|
"learning_rate": 1.9308698717182874e-06, |
|
"loss": 0.9976, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 12.507721900939941, |
|
"learning_rate": 1.9216926233717087e-06, |
|
"loss": 1.1589, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.222, |
|
"grad_norm": 12.828377723693848, |
|
"learning_rate": 1.9125320453473923e-06, |
|
"loss": 1.0951, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.224, |
|
"grad_norm": 12.240373611450195, |
|
"learning_rate": 1.9033881872537009e-06, |
|
"loss": 1.1288, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.226, |
|
"grad_norm": 11.162793159484863, |
|
"learning_rate": 1.8942610986084487e-06, |
|
"loss": 1.1229, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.228, |
|
"grad_norm": 12.57970905303955, |
|
"learning_rate": 1.88515082883864e-06, |
|
"loss": 1.0853, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 12.0857515335083, |
|
"learning_rate": 1.8760574272802002e-06, |
|
"loss": 1.1373, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.232, |
|
"grad_norm": 10.519421577453613, |
|
"learning_rate": 1.8669809431776991e-06, |
|
"loss": 1.1059, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.234, |
|
"grad_norm": 11.660649299621582, |
|
"learning_rate": 1.8579214256840938e-06, |
|
"loss": 1.2105, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.2359999999999998, |
|
"grad_norm": 11.070806503295898, |
|
"learning_rate": 1.8488789238604676e-06, |
|
"loss": 1.0679, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.238, |
|
"grad_norm": 11.053348541259766, |
|
"learning_rate": 1.8398534866757455e-06, |
|
"loss": 1.0581, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 12.008465766906738, |
|
"learning_rate": 1.8308451630064484e-06, |
|
"loss": 1.1217, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.242, |
|
"grad_norm": 12.64566707611084, |
|
"learning_rate": 1.8218540016364178e-06, |
|
"loss": 1.0691, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.2439999999999998, |
|
"grad_norm": 12.815990447998047, |
|
"learning_rate": 1.8128800512565514e-06, |
|
"loss": 1.0881, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.246, |
|
"grad_norm": 12.682955741882324, |
|
"learning_rate": 1.8039233604645468e-06, |
|
"loss": 1.0893, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.248, |
|
"grad_norm": 11.445165634155273, |
|
"learning_rate": 1.7949839777646327e-06, |
|
"loss": 1.1113, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 11.555493354797363, |
|
"learning_rate": 1.7860619515673034e-06, |
|
"loss": 1.1268, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.252, |
|
"grad_norm": 12.906476974487305, |
|
"learning_rate": 1.7771573301890666e-06, |
|
"loss": 1.1886, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.254, |
|
"grad_norm": 11.322837829589844, |
|
"learning_rate": 1.7682701618521687e-06, |
|
"loss": 1.0748, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.2560000000000002, |
|
"grad_norm": 14.397950172424316, |
|
"learning_rate": 1.7594004946843458e-06, |
|
"loss": 1.174, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.258, |
|
"grad_norm": 12.00009536743164, |
|
"learning_rate": 1.7505483767185583e-06, |
|
"loss": 1.0512, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 13.125904083251953, |
|
"learning_rate": 1.7417138558927244e-06, |
|
"loss": 1.1748, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.262, |
|
"grad_norm": 11.499984741210938, |
|
"learning_rate": 1.7328969800494727e-06, |
|
"loss": 0.9966, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.2640000000000002, |
|
"grad_norm": 11.893850326538086, |
|
"learning_rate": 1.7240977969358757e-06, |
|
"loss": 1.1099, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.266, |
|
"grad_norm": 13.56734848022461, |
|
"learning_rate": 1.7153163542031881e-06, |
|
"loss": 1.1765, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.268, |
|
"grad_norm": 11.519514083862305, |
|
"learning_rate": 1.7065526994065973e-06, |
|
"loss": 1.0598, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 13.245732307434082, |
|
"learning_rate": 1.6978068800049624e-06, |
|
"loss": 0.9934, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.2720000000000002, |
|
"grad_norm": 12.127825736999512, |
|
"learning_rate": 1.6890789433605508e-06, |
|
"loss": 1.1238, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.274, |
|
"grad_norm": 12.772904396057129, |
|
"learning_rate": 1.680368936738792e-06, |
|
"loss": 1.1137, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.276, |
|
"grad_norm": 13.146944046020508, |
|
"learning_rate": 1.671676907308018e-06, |
|
"loss": 1.1266, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.278, |
|
"grad_norm": 11.267017364501953, |
|
"learning_rate": 1.6630029021392007e-06, |
|
"loss": 1.1258, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.2800000000000002, |
|
"grad_norm": 11.609925270080566, |
|
"learning_rate": 1.6543469682057105e-06, |
|
"loss": 1.097, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.282, |
|
"grad_norm": 12.532341957092285, |
|
"learning_rate": 1.645709152383046e-06, |
|
"loss": 1.193, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.284, |
|
"grad_norm": 11.958260536193848, |
|
"learning_rate": 1.6370895014486e-06, |
|
"loss": 1.0575, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.286, |
|
"grad_norm": 12.34417724609375, |
|
"learning_rate": 1.6284880620813847e-06, |
|
"loss": 1.0509, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.288, |
|
"grad_norm": 12.214522361755371, |
|
"learning_rate": 1.6199048808617896e-06, |
|
"loss": 1.0467, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 12.764805793762207, |
|
"learning_rate": 1.611340004271339e-06, |
|
"loss": 1.0441, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.292, |
|
"grad_norm": 11.342555046081543, |
|
"learning_rate": 1.6027934786924187e-06, |
|
"loss": 1.1076, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.294, |
|
"grad_norm": 13.207151412963867, |
|
"learning_rate": 1.594265350408039e-06, |
|
"loss": 1.2069, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.296, |
|
"grad_norm": 11.775276184082031, |
|
"learning_rate": 1.5857556656015837e-06, |
|
"loss": 1.0898, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.298, |
|
"grad_norm": 11.530139923095703, |
|
"learning_rate": 1.5772644703565564e-06, |
|
"loss": 1.098, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 11.4733247756958, |
|
"learning_rate": 1.5687918106563326e-06, |
|
"loss": 1.0295, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.302, |
|
"grad_norm": 11.146039962768555, |
|
"learning_rate": 1.5603377323839069e-06, |
|
"loss": 1.0014, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 13.669384002685547, |
|
"learning_rate": 1.551902281321651e-06, |
|
"loss": 1.2271, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.306, |
|
"grad_norm": 12.079588890075684, |
|
"learning_rate": 1.5434855031510626e-06, |
|
"loss": 1.0904, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.308, |
|
"grad_norm": 11.25638198852539, |
|
"learning_rate": 1.5350874434525142e-06, |
|
"loss": 1.0801, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 11.719844818115234, |
|
"learning_rate": 1.5267081477050132e-06, |
|
"loss": 1.0488, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.312, |
|
"grad_norm": 11.61817455291748, |
|
"learning_rate": 1.5183476612859538e-06, |
|
"loss": 1.0197, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.314, |
|
"grad_norm": 12.426447868347168, |
|
"learning_rate": 1.5100060294708647e-06, |
|
"loss": 1.155, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.316, |
|
"grad_norm": 11.935876846313477, |
|
"learning_rate": 1.5016832974331725e-06, |
|
"loss": 1.0953, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.318, |
|
"grad_norm": 15.38991641998291, |
|
"learning_rate": 1.4933795102439558e-06, |
|
"loss": 1.212, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 11.428476333618164, |
|
"learning_rate": 1.4850947128716914e-06, |
|
"loss": 1.1117, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.322, |
|
"grad_norm": 11.7312593460083, |
|
"learning_rate": 1.4768289501820265e-06, |
|
"loss": 1.0994, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.324, |
|
"grad_norm": 11.2421236038208, |
|
"learning_rate": 1.4685822669375239e-06, |
|
"loss": 1.1205, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.326, |
|
"grad_norm": 10.058606147766113, |
|
"learning_rate": 1.4603547077974217e-06, |
|
"loss": 1.0785, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.328, |
|
"grad_norm": 11.7360258102417, |
|
"learning_rate": 1.4521463173173966e-06, |
|
"loss": 1.097, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 13.815256118774414, |
|
"learning_rate": 1.4439571399493146e-06, |
|
"loss": 1.0887, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.332, |
|
"grad_norm": 12.337494850158691, |
|
"learning_rate": 1.4357872200409988e-06, |
|
"loss": 1.1101, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.334, |
|
"grad_norm": 11.706501007080078, |
|
"learning_rate": 1.4276366018359845e-06, |
|
"loss": 1.0746, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.336, |
|
"grad_norm": 12.499887466430664, |
|
"learning_rate": 1.4195053294732757e-06, |
|
"loss": 1.1403, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.338, |
|
"grad_norm": 12.3418550491333, |
|
"learning_rate": 1.4113934469871166e-06, |
|
"loss": 1.1632, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 12.219500541687012, |
|
"learning_rate": 1.4033009983067454e-06, |
|
"loss": 1.1484, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.342, |
|
"grad_norm": 11.66283893585205, |
|
"learning_rate": 1.3952280272561541e-06, |
|
"loss": 1.0012, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.344, |
|
"grad_norm": 13.044709205627441, |
|
"learning_rate": 1.3871745775538598e-06, |
|
"loss": 1.2462, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.346, |
|
"grad_norm": 11.258277893066406, |
|
"learning_rate": 1.3791406928126638e-06, |
|
"loss": 1.1165, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.348, |
|
"grad_norm": 11.9083833694458, |
|
"learning_rate": 1.371126416539409e-06, |
|
"loss": 1.0926, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 12.25865364074707, |
|
"learning_rate": 1.3631317921347564e-06, |
|
"loss": 1.1481, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.352, |
|
"grad_norm": 11.566573143005371, |
|
"learning_rate": 1.3551568628929434e-06, |
|
"loss": 1.0938, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.354, |
|
"grad_norm": 12.630902290344238, |
|
"learning_rate": 1.3472016720015447e-06, |
|
"loss": 1.1382, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.356, |
|
"grad_norm": 12.472233772277832, |
|
"learning_rate": 1.339266262541249e-06, |
|
"loss": 1.149, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.358, |
|
"grad_norm": 11.453776359558105, |
|
"learning_rate": 1.3313506774856177e-06, |
|
"loss": 1.0327, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 10.934307098388672, |
|
"learning_rate": 1.3234549597008572e-06, |
|
"loss": 1.1564, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.362, |
|
"grad_norm": 13.961017608642578, |
|
"learning_rate": 1.3155791519455812e-06, |
|
"loss": 1.0754, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.364, |
|
"grad_norm": 12.650055885314941, |
|
"learning_rate": 1.3077232968705805e-06, |
|
"loss": 1.1337, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.366, |
|
"grad_norm": 12.112220764160156, |
|
"learning_rate": 1.2998874370186026e-06, |
|
"loss": 1.0891, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 11.407106399536133, |
|
"learning_rate": 1.2920716148241036e-06, |
|
"loss": 1.1488, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 11.474730491638184, |
|
"learning_rate": 1.2842758726130283e-06, |
|
"loss": 1.1326, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.372, |
|
"grad_norm": 12.02614974975586, |
|
"learning_rate": 1.2765002526025871e-06, |
|
"loss": 1.0903, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.374, |
|
"grad_norm": 13.217632293701172, |
|
"learning_rate": 1.2687447969010113e-06, |
|
"loss": 1.1562, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.376, |
|
"grad_norm": 13.802258491516113, |
|
"learning_rate": 1.2610095475073415e-06, |
|
"loss": 1.1038, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.378, |
|
"grad_norm": 11.107643127441406, |
|
"learning_rate": 1.2532945463111856e-06, |
|
"loss": 1.0914, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 12.443587303161621, |
|
"learning_rate": 1.2455998350925042e-06, |
|
"loss": 1.1518, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.382, |
|
"grad_norm": 12.572113990783691, |
|
"learning_rate": 1.2379254555213788e-06, |
|
"loss": 1.1528, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.384, |
|
"grad_norm": 11.02535629272461, |
|
"learning_rate": 1.2302714491577834e-06, |
|
"loss": 1.1726, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.386, |
|
"grad_norm": 12.016326904296875, |
|
"learning_rate": 1.2226378574513654e-06, |
|
"loss": 1.1441, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.388, |
|
"grad_norm": 11.50503921508789, |
|
"learning_rate": 1.2150247217412186e-06, |
|
"loss": 1.2099, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 11.974237442016602, |
|
"learning_rate": 1.2074320832556558e-06, |
|
"loss": 1.1982, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.392, |
|
"grad_norm": 11.554012298583984, |
|
"learning_rate": 1.1998599831119912e-06, |
|
"loss": 1.0467, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.394, |
|
"grad_norm": 13.131747245788574, |
|
"learning_rate": 1.1923084623163172e-06, |
|
"loss": 1.143, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.396, |
|
"grad_norm": 10.756204605102539, |
|
"learning_rate": 1.1847775617632746e-06, |
|
"loss": 1.1161, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.398, |
|
"grad_norm": 12.911304473876953, |
|
"learning_rate": 1.1772673222358421e-06, |
|
"loss": 1.1532, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 10.806382179260254, |
|
"learning_rate": 1.1697777844051105e-06, |
|
"loss": 1.0573, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.402, |
|
"grad_norm": 11.249881744384766, |
|
"learning_rate": 1.162308988830057e-06, |
|
"loss": 1.1033, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.404, |
|
"grad_norm": 11.274486541748047, |
|
"learning_rate": 1.1548609759573375e-06, |
|
"loss": 1.2207, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.406, |
|
"grad_norm": 10.787302017211914, |
|
"learning_rate": 1.1474337861210543e-06, |
|
"loss": 1.1259, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.408, |
|
"grad_norm": 10.975671768188477, |
|
"learning_rate": 1.1400274595425499e-06, |
|
"loss": 1.0418, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 10.228716850280762, |
|
"learning_rate": 1.132642036330181e-06, |
|
"loss": 1.087, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.412, |
|
"grad_norm": 13.028627395629883, |
|
"learning_rate": 1.1252775564791023e-06, |
|
"loss": 1.1651, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.414, |
|
"grad_norm": 10.35562801361084, |
|
"learning_rate": 1.1179340598710547e-06, |
|
"loss": 1.1121, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.416, |
|
"grad_norm": 11.16613483428955, |
|
"learning_rate": 1.1106115862741457e-06, |
|
"loss": 1.1242, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.418, |
|
"grad_norm": 11.81697940826416, |
|
"learning_rate": 1.1033101753426285e-06, |
|
"loss": 1.167, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 13.364669799804688, |
|
"learning_rate": 1.096029866616704e-06, |
|
"loss": 1.0961, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.422, |
|
"grad_norm": 11.796897888183594, |
|
"learning_rate": 1.0887706995222864e-06, |
|
"loss": 1.137, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.424, |
|
"grad_norm": 11.452189445495605, |
|
"learning_rate": 1.0815327133708015e-06, |
|
"loss": 1.1626, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.426, |
|
"grad_norm": 11.605690956115723, |
|
"learning_rate": 1.0743159473589738e-06, |
|
"loss": 1.1818, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.428, |
|
"grad_norm": 12.917741775512695, |
|
"learning_rate": 1.0671204405686108e-06, |
|
"loss": 1.0901, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 11.108345985412598, |
|
"learning_rate": 1.0599462319663906e-06, |
|
"loss": 1.0953, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 11.12553882598877, |
|
"learning_rate": 1.052793360403655e-06, |
|
"loss": 1.1915, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.434, |
|
"grad_norm": 12.073833465576172, |
|
"learning_rate": 1.0456618646161954e-06, |
|
"loss": 1.2316, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.436, |
|
"grad_norm": 11.272497177124023, |
|
"learning_rate": 1.0385517832240472e-06, |
|
"loss": 1.1353, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.438, |
|
"grad_norm": 11.970922470092773, |
|
"learning_rate": 1.0314631547312738e-06, |
|
"loss": 1.1071, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 11.06673812866211, |
|
"learning_rate": 1.0243960175257605e-06, |
|
"loss": 1.1197, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.442, |
|
"grad_norm": 11.756420135498047, |
|
"learning_rate": 1.0173504098790188e-06, |
|
"loss": 1.0385, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.444, |
|
"grad_norm": 11.263325691223145, |
|
"learning_rate": 1.010326369945957e-06, |
|
"loss": 1.0892, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.446, |
|
"grad_norm": 11.817276954650879, |
|
"learning_rate": 1.0033239357646913e-06, |
|
"loss": 1.138, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.448, |
|
"grad_norm": 11.443320274353027, |
|
"learning_rate": 9.963431452563331e-07, |
|
"loss": 1.0683, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 11.445216178894043, |
|
"learning_rate": 9.893840362247809e-07, |
|
"loss": 1.1286, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.452, |
|
"grad_norm": 12.109580039978027, |
|
"learning_rate": 9.824466463565246e-07, |
|
"loss": 1.2696, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.454, |
|
"grad_norm": 12.882740020751953, |
|
"learning_rate": 9.7553101322043e-07, |
|
"loss": 1.1698, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.456, |
|
"grad_norm": 12.377523422241211, |
|
"learning_rate": 9.686371742675443e-07, |
|
"loss": 1.2098, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.458, |
|
"grad_norm": 11.193984985351562, |
|
"learning_rate": 9.617651668308914e-07, |
|
"loss": 1.1167, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 11.221639633178711, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 1.1706, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.462, |
|
"grad_norm": 11.720982551574707, |
|
"learning_rate": 9.480867952470285e-07, |
|
"loss": 1.1991, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.464, |
|
"grad_norm": 9.958009719848633, |
|
"learning_rate": 9.412805051739266e-07, |
|
"loss": 1.0561, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.466, |
|
"grad_norm": 12.225953102111816, |
|
"learning_rate": 9.344961947648624e-07, |
|
"loss": 1.0812, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.468, |
|
"grad_norm": 11.826346397399902, |
|
"learning_rate": 9.277339007597158e-07, |
|
"loss": 1.1611, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.4699999999999998, |
|
"grad_norm": 11.533407211303711, |
|
"learning_rate": 9.209936597791407e-07, |
|
"loss": 1.1837, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.472, |
|
"grad_norm": 10.118829727172852, |
|
"learning_rate": 9.142755083243577e-07, |
|
"loss": 1.0882, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.474, |
|
"grad_norm": 13.343694686889648, |
|
"learning_rate": 9.075794827769696e-07, |
|
"loss": 1.1914, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.476, |
|
"grad_norm": 11.293721199035645, |
|
"learning_rate": 9.009056193987569e-07, |
|
"loss": 1.0637, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.4779999999999998, |
|
"grad_norm": 10.591233253479004, |
|
"learning_rate": 8.942539543314799e-07, |
|
"loss": 1.1345, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 13.138537406921387, |
|
"learning_rate": 8.876245235966884e-07, |
|
"loss": 1.2053, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.482, |
|
"grad_norm": 10.92805290222168, |
|
"learning_rate": 8.810173630955249e-07, |
|
"loss": 1.1372, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.484, |
|
"grad_norm": 11.212373733520508, |
|
"learning_rate": 8.744325086085248e-07, |
|
"loss": 1.0706, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.4859999999999998, |
|
"grad_norm": 11.673620223999023, |
|
"learning_rate": 8.678699957954323e-07, |
|
"loss": 1.1367, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.488, |
|
"grad_norm": 11.204456329345703, |
|
"learning_rate": 8.613298601949971e-07, |
|
"loss": 1.0237, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 12.066752433776855, |
|
"learning_rate": 8.54812137224792e-07, |
|
"loss": 1.2087, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.492, |
|
"grad_norm": 10.22867202758789, |
|
"learning_rate": 8.483168621810133e-07, |
|
"loss": 1.1078, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.4939999999999998, |
|
"grad_norm": 12.135293960571289, |
|
"learning_rate": 8.418440702382897e-07, |
|
"loss": 1.0668, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 9.851129531860352, |
|
"learning_rate": 8.353937964495029e-07, |
|
"loss": 1.0521, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.498, |
|
"grad_norm": 10.662277221679688, |
|
"learning_rate": 8.289660757455803e-07, |
|
"loss": 1.0326, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 13.312153816223145, |
|
"learning_rate": 8.225609429353187e-07, |
|
"loss": 1.0838, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.502, |
|
"grad_norm": 10.633809089660645, |
|
"learning_rate": 8.161784327051919e-07, |
|
"loss": 1.0238, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.504, |
|
"grad_norm": 11.557962417602539, |
|
"learning_rate": 8.098185796191632e-07, |
|
"loss": 1.1297, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.5060000000000002, |
|
"grad_norm": 10.614686965942383, |
|
"learning_rate": 8.034814181184996e-07, |
|
"loss": 1.1224, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.508, |
|
"grad_norm": 11.647783279418945, |
|
"learning_rate": 7.971669825215789e-07, |
|
"loss": 1.191, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 12.704024314880371, |
|
"learning_rate": 7.908753070237124e-07, |
|
"loss": 1.0431, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.512, |
|
"grad_norm": 11.132079124450684, |
|
"learning_rate": 7.846064256969571e-07, |
|
"loss": 1.1417, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.5140000000000002, |
|
"grad_norm": 12.66719913482666, |
|
"learning_rate": 7.783603724899258e-07, |
|
"loss": 1.1726, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.516, |
|
"grad_norm": 13.010668754577637, |
|
"learning_rate": 7.72137181227608e-07, |
|
"loss": 1.2599, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.518, |
|
"grad_norm": 13.327101707458496, |
|
"learning_rate": 7.659368856111926e-07, |
|
"loss": 1.0874, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 12.53348445892334, |
|
"learning_rate": 7.597595192178702e-07, |
|
"loss": 1.0867, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.5220000000000002, |
|
"grad_norm": 11.78579044342041, |
|
"learning_rate": 7.536051155006657e-07, |
|
"loss": 1.1224, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.524, |
|
"grad_norm": 12.989802360534668, |
|
"learning_rate": 7.47473707788251e-07, |
|
"loss": 1.0848, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.526, |
|
"grad_norm": 10.429177284240723, |
|
"learning_rate": 7.413653292847617e-07, |
|
"loss": 1.0456, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.528, |
|
"grad_norm": 12.047698020935059, |
|
"learning_rate": 7.352800130696253e-07, |
|
"loss": 1.1725, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.5300000000000002, |
|
"grad_norm": 13.212532043457031, |
|
"learning_rate": 7.292177920973726e-07, |
|
"loss": 1.0755, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.532, |
|
"grad_norm": 11.261322021484375, |
|
"learning_rate": 7.23178699197467e-07, |
|
"loss": 1.0894, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.534, |
|
"grad_norm": 12.5214262008667, |
|
"learning_rate": 7.171627670741243e-07, |
|
"loss": 1.0554, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.536, |
|
"grad_norm": 11.23154067993164, |
|
"learning_rate": 7.111700283061318e-07, |
|
"loss": 1.1117, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.5380000000000003, |
|
"grad_norm": 14.136534690856934, |
|
"learning_rate": 7.052005153466779e-07, |
|
"loss": 1.1256, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 11.402527809143066, |
|
"learning_rate": 6.992542605231739e-07, |
|
"loss": 1.1365, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.542, |
|
"grad_norm": 10.729368209838867, |
|
"learning_rate": 6.933312960370748e-07, |
|
"loss": 1.0958, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.544, |
|
"grad_norm": 12.732880592346191, |
|
"learning_rate": 6.874316539637127e-07, |
|
"loss": 1.1066, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.5460000000000003, |
|
"grad_norm": 11.761198043823242, |
|
"learning_rate": 6.815553662521185e-07, |
|
"loss": 1.0512, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.548, |
|
"grad_norm": 12.321219444274902, |
|
"learning_rate": 6.757024647248456e-07, |
|
"loss": 1.1105, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 10.499734878540039, |
|
"learning_rate": 6.698729810778065e-07, |
|
"loss": 1.1287, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.552, |
|
"grad_norm": 11.47294807434082, |
|
"learning_rate": 6.640669468800947e-07, |
|
"loss": 1.1906, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.5540000000000003, |
|
"grad_norm": 10.670211791992188, |
|
"learning_rate": 6.58284393573812e-07, |
|
"loss": 1.0593, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.556, |
|
"grad_norm": 11.801868438720703, |
|
"learning_rate": 6.52525352473905e-07, |
|
"loss": 1.0933, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.558, |
|
"grad_norm": 12.327805519104004, |
|
"learning_rate": 6.467898547679913e-07, |
|
"loss": 1.0864, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 12.2786226272583, |
|
"learning_rate": 6.410779315161885e-07, |
|
"loss": 1.1561, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.5620000000000003, |
|
"grad_norm": 11.15182876586914, |
|
"learning_rate": 6.353896136509524e-07, |
|
"loss": 1.1301, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.564, |
|
"grad_norm": 11.805049896240234, |
|
"learning_rate": 6.297249319769016e-07, |
|
"loss": 1.0785, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.566, |
|
"grad_norm": 11.21564769744873, |
|
"learning_rate": 6.240839171706608e-07, |
|
"loss": 1.2017, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.568, |
|
"grad_norm": 12.291101455688477, |
|
"learning_rate": 6.184665997806832e-07, |
|
"loss": 1.0942, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 11.54765510559082, |
|
"learning_rate": 6.128730102270897e-07, |
|
"loss": 1.1542, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.572, |
|
"grad_norm": 10.28836441040039, |
|
"learning_rate": 6.073031788015133e-07, |
|
"loss": 1.0565, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.574, |
|
"grad_norm": 12.515631675720215, |
|
"learning_rate": 6.017571356669183e-07, |
|
"loss": 1.0679, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.576, |
|
"grad_norm": 12.502131462097168, |
|
"learning_rate": 5.962349108574478e-07, |
|
"loss": 1.0424, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.578, |
|
"grad_norm": 11.3397855758667, |
|
"learning_rate": 5.9073653427826e-07, |
|
"loss": 1.0948, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 10.07397174835205, |
|
"learning_rate": 5.852620357053651e-07, |
|
"loss": 1.0421, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.582, |
|
"grad_norm": 11.410602569580078, |
|
"learning_rate": 5.798114447854636e-07, |
|
"loss": 1.1638, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.584, |
|
"grad_norm": 11.01577091217041, |
|
"learning_rate": 5.743847910357836e-07, |
|
"loss": 1.0889, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.586, |
|
"grad_norm": 12.289595603942871, |
|
"learning_rate": 5.689821038439264e-07, |
|
"loss": 1.1365, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.588, |
|
"grad_norm": 12.653968811035156, |
|
"learning_rate": 5.636034124677043e-07, |
|
"loss": 1.208, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 11.91626262664795, |
|
"learning_rate": 5.582487460349806e-07, |
|
"loss": 1.2131, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.592, |
|
"grad_norm": 12.287956237792969, |
|
"learning_rate": 5.529181335435124e-07, |
|
"loss": 1.096, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.594, |
|
"grad_norm": 14.091611862182617, |
|
"learning_rate": 5.476116038607993e-07, |
|
"loss": 1.0923, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.596, |
|
"grad_norm": 11.291535377502441, |
|
"learning_rate": 5.423291857239177e-07, |
|
"loss": 1.0751, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.598, |
|
"grad_norm": 10.707423210144043, |
|
"learning_rate": 5.370709077393721e-07, |
|
"loss": 1.1536, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 10.066755294799805, |
|
"learning_rate": 5.318367983829393e-07, |
|
"loss": 1.1073, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.602, |
|
"grad_norm": 12.727775573730469, |
|
"learning_rate": 5.266268859995083e-07, |
|
"loss": 1.0421, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.604, |
|
"grad_norm": 10.819839477539062, |
|
"learning_rate": 5.214411988029355e-07, |
|
"loss": 1.0953, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.606, |
|
"grad_norm": 11.471372604370117, |
|
"learning_rate": 5.162797648758877e-07, |
|
"loss": 1.1238, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.608, |
|
"grad_norm": 11.147175788879395, |
|
"learning_rate": 5.111426121696866e-07, |
|
"loss": 1.1459, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 10.736647605895996, |
|
"learning_rate": 5.06029768504166e-07, |
|
"loss": 1.0977, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.612, |
|
"grad_norm": 10.199406623840332, |
|
"learning_rate": 5.009412615675102e-07, |
|
"loss": 1.0806, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.614, |
|
"grad_norm": 10.652769088745117, |
|
"learning_rate": 4.958771189161149e-07, |
|
"loss": 1.1096, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.616, |
|
"grad_norm": 10.275524139404297, |
|
"learning_rate": 4.908373679744316e-07, |
|
"loss": 1.1095, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.618, |
|
"grad_norm": 11.423064231872559, |
|
"learning_rate": 4.858220360348187e-07, |
|
"loss": 1.1176, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 12.112739562988281, |
|
"learning_rate": 4.808311502573976e-07, |
|
"loss": 1.1444, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.622, |
|
"grad_norm": 10.57923412322998, |
|
"learning_rate": 4.758647376699033e-07, |
|
"loss": 1.1199, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 11.439313888549805, |
|
"learning_rate": 4.709228251675357e-07, |
|
"loss": 1.0995, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.626, |
|
"grad_norm": 11.128246307373047, |
|
"learning_rate": 4.6600543951281995e-07, |
|
"loss": 1.1438, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.628, |
|
"grad_norm": 12.107537269592285, |
|
"learning_rate": 4.6111260733545714e-07, |
|
"loss": 1.0989, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 11.53816032409668, |
|
"learning_rate": 4.562443551321788e-07, |
|
"loss": 1.1305, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.632, |
|
"grad_norm": 10.835124015808105, |
|
"learning_rate": 4.514007092666084e-07, |
|
"loss": 1.1451, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.634, |
|
"grad_norm": 11.95799446105957, |
|
"learning_rate": 4.4658169596911493e-07, |
|
"loss": 1.1823, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.636, |
|
"grad_norm": 10.443716049194336, |
|
"learning_rate": 4.417873413366702e-07, |
|
"loss": 1.0862, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.638, |
|
"grad_norm": 12.407042503356934, |
|
"learning_rate": 4.370176713327118e-07, |
|
"loss": 1.0715, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 12.52112102508545, |
|
"learning_rate": 4.322727117869951e-07, |
|
"loss": 1.1408, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.642, |
|
"grad_norm": 12.11574935913086, |
|
"learning_rate": 4.275524883954657e-07, |
|
"loss": 1.1602, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.644, |
|
"grad_norm": 11.21030330657959, |
|
"learning_rate": 4.228570267201049e-07, |
|
"loss": 1.0917, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.646, |
|
"grad_norm": 10.748331069946289, |
|
"learning_rate": 4.1818635218880186e-07, |
|
"loss": 1.0612, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.648, |
|
"grad_norm": 12.968511581420898, |
|
"learning_rate": 4.1354049009521504e-07, |
|
"loss": 1.1723, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 13.352898597717285, |
|
"learning_rate": 4.089194655986306e-07, |
|
"loss": 1.127, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.652, |
|
"grad_norm": 10.655840873718262, |
|
"learning_rate": 4.043233037238281e-07, |
|
"loss": 1.0859, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.654, |
|
"grad_norm": 10.203536033630371, |
|
"learning_rate": 3.99752029360948e-07, |
|
"loss": 1.1438, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.656, |
|
"grad_norm": 12.646231651306152, |
|
"learning_rate": 3.9520566726535367e-07, |
|
"loss": 1.2127, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.658, |
|
"grad_norm": 12.770830154418945, |
|
"learning_rate": 3.90684242057498e-07, |
|
"loss": 1.0962, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 11.451815605163574, |
|
"learning_rate": 3.8618777822278854e-07, |
|
"loss": 1.0883, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.662, |
|
"grad_norm": 11.632091522216797, |
|
"learning_rate": 3.8171630011145877e-07, |
|
"loss": 1.1129, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.664, |
|
"grad_norm": 11.543761253356934, |
|
"learning_rate": 3.772698319384349e-07, |
|
"loss": 1.0953, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.666, |
|
"grad_norm": 12.997357368469238, |
|
"learning_rate": 3.728483977831998e-07, |
|
"loss": 1.1444, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.668, |
|
"grad_norm": 11.470213890075684, |
|
"learning_rate": 3.684520215896703e-07, |
|
"loss": 1.1423, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 10.749590873718262, |
|
"learning_rate": 3.6408072716606346e-07, |
|
"loss": 1.1258, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.672, |
|
"grad_norm": 10.158390045166016, |
|
"learning_rate": 3.597345381847656e-07, |
|
"loss": 1.032, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.674, |
|
"grad_norm": 10.492109298706055, |
|
"learning_rate": 3.554134781822094e-07, |
|
"loss": 1.1678, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.676, |
|
"grad_norm": 12.122913360595703, |
|
"learning_rate": 3.511175705587433e-07, |
|
"loss": 1.1777, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.678, |
|
"grad_norm": 10.95935344696045, |
|
"learning_rate": 3.468468385785023e-07, |
|
"loss": 1.1732, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 12.425519943237305, |
|
"learning_rate": 3.426013053692878e-07, |
|
"loss": 1.1649, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.682, |
|
"grad_norm": 11.766524314880371, |
|
"learning_rate": 3.3838099392243915e-07, |
|
"loss": 1.0, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.684, |
|
"grad_norm": 12.33736515045166, |
|
"learning_rate": 3.341859270927067e-07, |
|
"loss": 1.148, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.686, |
|
"grad_norm": 10.662920951843262, |
|
"learning_rate": 3.30016127598134e-07, |
|
"loss": 1.0533, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 11.27861213684082, |
|
"learning_rate": 3.258716180199278e-07, |
|
"loss": 1.114, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 11.482233047485352, |
|
"learning_rate": 3.2175242080234314e-07, |
|
"loss": 1.1744, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.692, |
|
"grad_norm": 11.544189453125, |
|
"learning_rate": 3.1765855825255543e-07, |
|
"loss": 1.0746, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.694, |
|
"grad_norm": 13.263728141784668, |
|
"learning_rate": 3.135900525405428e-07, |
|
"loss": 1.1174, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.6959999999999997, |
|
"grad_norm": 11.041204452514648, |
|
"learning_rate": 3.0954692569896585e-07, |
|
"loss": 1.1725, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.698, |
|
"grad_norm": 11.216727256774902, |
|
"learning_rate": 3.055291996230492e-07, |
|
"loss": 1.1185, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 11.795129776000977, |
|
"learning_rate": 3.015368960704584e-07, |
|
"loss": 1.1468, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.702, |
|
"grad_norm": 10.980323791503906, |
|
"learning_rate": 2.975700366611883e-07, |
|
"loss": 1.0928, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.7039999999999997, |
|
"grad_norm": 12.678966522216797, |
|
"learning_rate": 2.9362864287744266e-07, |
|
"loss": 1.1624, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.706, |
|
"grad_norm": 12.718985557556152, |
|
"learning_rate": 2.8971273606351656e-07, |
|
"loss": 1.141, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.708, |
|
"grad_norm": 11.222257614135742, |
|
"learning_rate": 2.858223374256841e-07, |
|
"loss": 1.1179, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 10.49862003326416, |
|
"learning_rate": 2.819574680320825e-07, |
|
"loss": 1.1192, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.7119999999999997, |
|
"grad_norm": 13.092155456542969, |
|
"learning_rate": 2.7811814881259503e-07, |
|
"loss": 1.1709, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.714, |
|
"grad_norm": 10.232337951660156, |
|
"learning_rate": 2.743044005587425e-07, |
|
"loss": 1.0475, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.716, |
|
"grad_norm": 12.870683670043945, |
|
"learning_rate": 2.705162439235648e-07, |
|
"loss": 1.0407, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.718, |
|
"grad_norm": 11.157443046569824, |
|
"learning_rate": 2.6675369942151864e-07, |
|
"loss": 1.1319, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.7199999999999998, |
|
"grad_norm": 10.935739517211914, |
|
"learning_rate": 2.63016787428354e-07, |
|
"loss": 1.1432, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.722, |
|
"grad_norm": 12.044547080993652, |
|
"learning_rate": 2.593055281810125e-07, |
|
"loss": 1.1435, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.724, |
|
"grad_norm": 10.721463203430176, |
|
"learning_rate": 2.556199417775174e-07, |
|
"loss": 1.1649, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.726, |
|
"grad_norm": 10.794017791748047, |
|
"learning_rate": 2.519600481768597e-07, |
|
"loss": 1.1491, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.7279999999999998, |
|
"grad_norm": 11.180771827697754, |
|
"learning_rate": 2.483258671988942e-07, |
|
"loss": 1.0898, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 11.11221694946289, |
|
"learning_rate": 2.447174185242324e-07, |
|
"loss": 1.1323, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.732, |
|
"grad_norm": 11.723511695861816, |
|
"learning_rate": 2.4113472169413176e-07, |
|
"loss": 1.2018, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.734, |
|
"grad_norm": 12.620880126953125, |
|
"learning_rate": 2.37577796110397e-07, |
|
"loss": 1.1411, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.7359999999999998, |
|
"grad_norm": 11.710165023803711, |
|
"learning_rate": 2.3404666103526542e-07, |
|
"loss": 1.0482, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.738, |
|
"grad_norm": 11.673789024353027, |
|
"learning_rate": 2.3054133559131163e-07, |
|
"loss": 1.2405, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 11.153477668762207, |
|
"learning_rate": 2.2706183876134047e-07, |
|
"loss": 1.1506, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.742, |
|
"grad_norm": 11.211380958557129, |
|
"learning_rate": 2.2360818938828189e-07, |
|
"loss": 1.046, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.7439999999999998, |
|
"grad_norm": 10.854473114013672, |
|
"learning_rate": 2.2018040617509174e-07, |
|
"loss": 1.0861, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.746, |
|
"grad_norm": 11.484323501586914, |
|
"learning_rate": 2.167785076846518e-07, |
|
"loss": 1.0931, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.748, |
|
"grad_norm": 12.862896919250488, |
|
"learning_rate": 2.134025123396638e-07, |
|
"loss": 1.215, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 12.15230941772461, |
|
"learning_rate": 2.1005243842255552e-07, |
|
"loss": 1.167, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 10.922401428222656, |
|
"learning_rate": 2.0672830407537925e-07, |
|
"loss": 1.1432, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.754, |
|
"grad_norm": 12.105512619018555, |
|
"learning_rate": 2.0343012729971244e-07, |
|
"loss": 1.1478, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.7560000000000002, |
|
"grad_norm": 10.566096305847168, |
|
"learning_rate": 2.0015792595656225e-07, |
|
"loss": 1.1038, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.758, |
|
"grad_norm": 12.602991104125977, |
|
"learning_rate": 1.9691171776626882e-07, |
|
"loss": 1.2035, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 10.561416625976562, |
|
"learning_rate": 1.9369152030840553e-07, |
|
"loss": 1.1399, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.762, |
|
"grad_norm": 12.064254760742188, |
|
"learning_rate": 1.904973510216912e-07, |
|
"loss": 1.2584, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.7640000000000002, |
|
"grad_norm": 10.637829780578613, |
|
"learning_rate": 1.873292272038868e-07, |
|
"loss": 1.0919, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.766, |
|
"grad_norm": 11.943845748901367, |
|
"learning_rate": 1.841871660117095e-07, |
|
"loss": 1.1898, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.768, |
|
"grad_norm": 11.168000221252441, |
|
"learning_rate": 1.8107118446073492e-07, |
|
"loss": 1.1557, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 10.773613929748535, |
|
"learning_rate": 1.779812994253055e-07, |
|
"loss": 1.1016, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.7720000000000002, |
|
"grad_norm": 10.496236801147461, |
|
"learning_rate": 1.7491752763844294e-07, |
|
"loss": 1.0975, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.774, |
|
"grad_norm": 11.72801399230957, |
|
"learning_rate": 1.7187988569175307e-07, |
|
"loss": 1.0915, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.776, |
|
"grad_norm": 11.977339744567871, |
|
"learning_rate": 1.688683900353366e-07, |
|
"loss": 1.1441, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.778, |
|
"grad_norm": 10.9048490524292, |
|
"learning_rate": 1.6588305697770313e-07, |
|
"loss": 1.0953, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.7800000000000002, |
|
"grad_norm": 10.485379219055176, |
|
"learning_rate": 1.6292390268568103e-07, |
|
"loss": 1.0981, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.782, |
|
"grad_norm": 10.757002830505371, |
|
"learning_rate": 1.5999094318432662e-07, |
|
"loss": 1.1695, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.784, |
|
"grad_norm": 11.401092529296875, |
|
"learning_rate": 1.5708419435684463e-07, |
|
"loss": 1.0509, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.786, |
|
"grad_norm": 11.38297176361084, |
|
"learning_rate": 1.5420367194449448e-07, |
|
"loss": 1.0742, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.7880000000000003, |
|
"grad_norm": 10.606424331665039, |
|
"learning_rate": 1.5134939154651196e-07, |
|
"loss": 1.1533, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 11.047826766967773, |
|
"learning_rate": 1.4852136862001766e-07, |
|
"loss": 1.15, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.792, |
|
"grad_norm": 11.345439910888672, |
|
"learning_rate": 1.4571961847993977e-07, |
|
"loss": 1.2146, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.794, |
|
"grad_norm": 11.464273452758789, |
|
"learning_rate": 1.4294415629892756e-07, |
|
"loss": 1.0851, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.7960000000000003, |
|
"grad_norm": 11.990900039672852, |
|
"learning_rate": 1.4019499710726913e-07, |
|
"loss": 1.1197, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.798, |
|
"grad_norm": 11.901836395263672, |
|
"learning_rate": 1.374721557928116e-07, |
|
"loss": 1.2454, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 10.661705017089844, |
|
"learning_rate": 1.3477564710088097e-07, |
|
"loss": 1.0545, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.802, |
|
"grad_norm": 11.410598754882812, |
|
"learning_rate": 1.3210548563419857e-07, |
|
"loss": 1.0326, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 2.8040000000000003, |
|
"grad_norm": 11.102482795715332, |
|
"learning_rate": 1.294616858528064e-07, |
|
"loss": 1.1085, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.806, |
|
"grad_norm": 10.520735740661621, |
|
"learning_rate": 1.268442620739868e-07, |
|
"loss": 1.1905, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 2.808, |
|
"grad_norm": 11.018781661987305, |
|
"learning_rate": 1.2425322847218368e-07, |
|
"loss": 1.1735, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 12.908723831176758, |
|
"learning_rate": 1.2168859907892904e-07, |
|
"loss": 1.1788, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.8120000000000003, |
|
"grad_norm": 11.897324562072754, |
|
"learning_rate": 1.1915038778276212e-07, |
|
"loss": 1.093, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.814, |
|
"grad_norm": 11.710148811340332, |
|
"learning_rate": 1.166386083291604e-07, |
|
"loss": 1.1192, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 11.686257362365723, |
|
"learning_rate": 1.1415327432046041e-07, |
|
"loss": 1.0824, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.818, |
|
"grad_norm": 11.252178192138672, |
|
"learning_rate": 1.1169439921578485e-07, |
|
"loss": 1.0743, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 10.60265064239502, |
|
"learning_rate": 1.0926199633097156e-07, |
|
"loss": 1.0421, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.822, |
|
"grad_norm": 12.824352264404297, |
|
"learning_rate": 1.0685607883850035e-07, |
|
"loss": 1.1632, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 2.824, |
|
"grad_norm": 11.57745361328125, |
|
"learning_rate": 1.044766597674196e-07, |
|
"loss": 1.1655, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.826, |
|
"grad_norm": 11.447077751159668, |
|
"learning_rate": 1.0212375200327973e-07, |
|
"loss": 1.0834, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 2.828, |
|
"grad_norm": 12.608105659484863, |
|
"learning_rate": 9.979736828806096e-08, |
|
"loss": 1.1674, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 11.511768341064453, |
|
"learning_rate": 9.749752122010347e-08, |
|
"loss": 1.2345, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.832, |
|
"grad_norm": 10.86443042755127, |
|
"learning_rate": 9.522422325404234e-08, |
|
"loss": 1.0663, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.834, |
|
"grad_norm": 11.792540550231934, |
|
"learning_rate": 9.297748670073658e-08, |
|
"loss": 1.0739, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 2.836, |
|
"grad_norm": 12.60987377166748, |
|
"learning_rate": 9.075732372720414e-08, |
|
"loss": 1.121, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.838, |
|
"grad_norm": 11.70394515991211, |
|
"learning_rate": 8.856374635655696e-08, |
|
"loss": 1.1565, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 11.06529426574707, |
|
"learning_rate": 8.639676646793382e-08, |
|
"loss": 1.0428, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.842, |
|
"grad_norm": 11.53507137298584, |
|
"learning_rate": 8.425639579643763e-08, |
|
"loss": 1.094, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 2.844, |
|
"grad_norm": 10.740865707397461, |
|
"learning_rate": 8.214264593307097e-08, |
|
"loss": 1.0513, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 2.846, |
|
"grad_norm": 11.995918273925781, |
|
"learning_rate": 8.00555283246729e-08, |
|
"loss": 1.1279, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 2.848, |
|
"grad_norm": 12.916094779968262, |
|
"learning_rate": 7.799505427386001e-08, |
|
"loss": 1.1227, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 10.555110931396484, |
|
"learning_rate": 7.59612349389599e-08, |
|
"loss": 1.0907, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.852, |
|
"grad_norm": 11.170307159423828, |
|
"learning_rate": 7.395408133395509e-08, |
|
"loss": 1.1291, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 2.854, |
|
"grad_norm": 10.48733901977539, |
|
"learning_rate": 7.197360432842359e-08, |
|
"loss": 1.1168, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 2.856, |
|
"grad_norm": 11.542524337768555, |
|
"learning_rate": 7.001981464747565e-08, |
|
"loss": 1.1142, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 2.858, |
|
"grad_norm": 10.778676986694336, |
|
"learning_rate": 6.809272287169988e-08, |
|
"loss": 1.1537, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 10.768854141235352, |
|
"learning_rate": 6.61923394371039e-08, |
|
"loss": 1.0235, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.862, |
|
"grad_norm": 10.812145233154297, |
|
"learning_rate": 6.431867463506047e-08, |
|
"loss": 1.1195, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 2.864, |
|
"grad_norm": 10.739622116088867, |
|
"learning_rate": 6.247173861224753e-08, |
|
"loss": 1.1552, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 2.866, |
|
"grad_norm": 10.252985954284668, |
|
"learning_rate": 6.065154137059603e-08, |
|
"loss": 1.1009, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 2.868, |
|
"grad_norm": 11.594807624816895, |
|
"learning_rate": 5.8858092767236084e-08, |
|
"loss": 1.1601, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 10.98749828338623, |
|
"learning_rate": 5.709140251444201e-08, |
|
"loss": 1.1708, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.872, |
|
"grad_norm": 11.422292709350586, |
|
"learning_rate": 5.535148017958014e-08, |
|
"loss": 1.0827, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 2.874, |
|
"grad_norm": 10.309944152832031, |
|
"learning_rate": 5.363833518505834e-08, |
|
"loss": 1.1094, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 2.876, |
|
"grad_norm": 11.423945426940918, |
|
"learning_rate": 5.19519768082738e-08, |
|
"loss": 1.2409, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 2.878, |
|
"grad_norm": 10.608428001403809, |
|
"learning_rate": 5.029241418156139e-08, |
|
"loss": 1.1391, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 13.38857650756836, |
|
"learning_rate": 4.865965629214819e-08, |
|
"loss": 1.1201, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.882, |
|
"grad_norm": 11.526823997497559, |
|
"learning_rate": 4.7053711982101294e-08, |
|
"loss": 1.145, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 2.884, |
|
"grad_norm": 11.45752239227295, |
|
"learning_rate": 4.5474589948280026e-08, |
|
"loss": 1.2218, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 2.886, |
|
"grad_norm": 10.46944808959961, |
|
"learning_rate": 4.392229874229159e-08, |
|
"loss": 1.2481, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 2.888, |
|
"grad_norm": 12.171918869018555, |
|
"learning_rate": 4.2396846770441644e-08, |
|
"loss": 1.1381, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 11.57470703125, |
|
"learning_rate": 4.0898242293691546e-08, |
|
"loss": 1.1412, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.892, |
|
"grad_norm": 11.620773315429688, |
|
"learning_rate": 3.9426493427611177e-08, |
|
"loss": 1.1664, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 2.894, |
|
"grad_norm": 12.99191951751709, |
|
"learning_rate": 3.7981608142335644e-08, |
|
"loss": 1.2922, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 2.896, |
|
"grad_norm": 11.910186767578125, |
|
"learning_rate": 3.65635942625242e-08, |
|
"loss": 1.285, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 2.898, |
|
"grad_norm": 11.08059310913086, |
|
"learning_rate": 3.517245946731529e-08, |
|
"loss": 1.1577, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 11.138588905334473, |
|
"learning_rate": 3.3808211290284886e-08, |
|
"loss": 1.1573, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.902, |
|
"grad_norm": 10.384156227111816, |
|
"learning_rate": 3.247085711940878e-08, |
|
"loss": 1.124, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 2.904, |
|
"grad_norm": 12.75297737121582, |
|
"learning_rate": 3.1160404197018155e-08, |
|
"loss": 1.1535, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 2.906, |
|
"grad_norm": 12.249570846557617, |
|
"learning_rate": 2.9876859619764606e-08, |
|
"loss": 1.1892, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 2.908, |
|
"grad_norm": 12.755828857421875, |
|
"learning_rate": 2.8620230338578526e-08, |
|
"loss": 1.0924, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 13.399191856384277, |
|
"learning_rate": 2.7390523158633552e-08, |
|
"loss": 1.0451, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.912, |
|
"grad_norm": 11.16951847076416, |
|
"learning_rate": 2.6187744739308297e-08, |
|
"loss": 1.0882, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 2.914, |
|
"grad_norm": 10.711827278137207, |
|
"learning_rate": 2.501190159415079e-08, |
|
"loss": 1.164, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 2.916, |
|
"grad_norm": 10.825516700744629, |
|
"learning_rate": 2.386300009084408e-08, |
|
"loss": 1.0708, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 2.918, |
|
"grad_norm": 11.371330261230469, |
|
"learning_rate": 2.27410464511707e-08, |
|
"loss": 1.1395, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 11.252603530883789, |
|
"learning_rate": 2.1646046750978255e-08, |
|
"loss": 1.1699, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.922, |
|
"grad_norm": 11.264942169189453, |
|
"learning_rate": 2.057800692014833e-08, |
|
"loss": 1.0813, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 2.924, |
|
"grad_norm": 11.598838806152344, |
|
"learning_rate": 1.953693274256374e-08, |
|
"loss": 1.124, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 2.926, |
|
"grad_norm": 10.306319236755371, |
|
"learning_rate": 1.8522829856076895e-08, |
|
"loss": 1.0401, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 2.928, |
|
"grad_norm": 11.464588165283203, |
|
"learning_rate": 1.753570375247815e-08, |
|
"loss": 1.154, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 11.513689994812012, |
|
"learning_rate": 1.657555977746972e-08, |
|
"loss": 1.0962, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.932, |
|
"grad_norm": 11.152294158935547, |
|
"learning_rate": 1.5642403130632367e-08, |
|
"loss": 1.119, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 2.934, |
|
"grad_norm": 11.795382499694824, |
|
"learning_rate": 1.4736238865398766e-08, |
|
"loss": 1.1594, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 2.936, |
|
"grad_norm": 13.539538383483887, |
|
"learning_rate": 1.3857071889029073e-08, |
|
"loss": 1.1061, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 2.9379999999999997, |
|
"grad_norm": 10.830578804016113, |
|
"learning_rate": 1.3004906962578723e-08, |
|
"loss": 1.114, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 12.558431625366211, |
|
"learning_rate": 1.2179748700879013e-08, |
|
"loss": 1.2487, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.942, |
|
"grad_norm": 11.865883827209473, |
|
"learning_rate": 1.1381601572505452e-08, |
|
"loss": 1.0191, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 10.461407661437988, |
|
"learning_rate": 1.0610469899760001e-08, |
|
"loss": 1.1221, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 2.9459999999999997, |
|
"grad_norm": 11.201630592346191, |
|
"learning_rate": 9.866357858642206e-09, |
|
"loss": 0.9862, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 2.948, |
|
"grad_norm": 11.596713066101074, |
|
"learning_rate": 9.14926947883088e-09, |
|
"loss": 1.2098, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 10.908271789550781, |
|
"learning_rate": 8.459208643659122e-09, |
|
"loss": 1.1182, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.952, |
|
"grad_norm": 11.701807975769043, |
|
"learning_rate": 7.796179090094891e-09, |
|
"loss": 1.2094, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 2.9539999999999997, |
|
"grad_norm": 13.19516658782959, |
|
"learning_rate": 7.160184408721571e-09, |
|
"loss": 1.1626, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 2.956, |
|
"grad_norm": 12.156759262084961, |
|
"learning_rate": 6.551228043715218e-09, |
|
"loss": 1.1171, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 2.958, |
|
"grad_norm": 10.075406074523926, |
|
"learning_rate": 5.969313292830126e-09, |
|
"loss": 0.9876, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 11.928035736083984, |
|
"learning_rate": 5.414443307377171e-09, |
|
"loss": 1.1166, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.9619999999999997, |
|
"grad_norm": 10.68159294128418, |
|
"learning_rate": 4.8866210922110525e-09, |
|
"loss": 1.058, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 2.964, |
|
"grad_norm": 11.216858863830566, |
|
"learning_rate": 4.385849505708084e-09, |
|
"loss": 1.1111, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 2.966, |
|
"grad_norm": 12.130516052246094, |
|
"learning_rate": 3.912131259757313e-09, |
|
"loss": 1.1494, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 2.968, |
|
"grad_norm": 10.646476745605469, |
|
"learning_rate": 3.4654689197405335e-09, |
|
"loss": 1.0957, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 2.9699999999999998, |
|
"grad_norm": 11.538399696350098, |
|
"learning_rate": 3.0458649045211897e-09, |
|
"loss": 1.1852, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.972, |
|
"grad_norm": 12.342727661132812, |
|
"learning_rate": 2.6533214864310485e-09, |
|
"loss": 1.1064, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 2.974, |
|
"grad_norm": 11.315055847167969, |
|
"learning_rate": 2.287840791256324e-09, |
|
"loss": 1.0906, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 2.976, |
|
"grad_norm": 12.045461654663086, |
|
"learning_rate": 1.9494247982282386e-09, |
|
"loss": 1.163, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 2.9779999999999998, |
|
"grad_norm": 10.301689147949219, |
|
"learning_rate": 1.638075340010814e-09, |
|
"loss": 1.1269, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 11.46061897277832, |
|
"learning_rate": 1.3537941026914302e-09, |
|
"loss": 1.0705, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.982, |
|
"grad_norm": 10.900529861450195, |
|
"learning_rate": 1.096582625772502e-09, |
|
"loss": 1.099, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 2.984, |
|
"grad_norm": 10.490682601928711, |
|
"learning_rate": 8.664423021614854e-10, |
|
"loss": 1.0517, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 2.9859999999999998, |
|
"grad_norm": 11.499110221862793, |
|
"learning_rate": 6.633743781642166e-10, |
|
"loss": 1.177, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 2.988, |
|
"grad_norm": 11.795324325561523, |
|
"learning_rate": 4.87379953478806e-10, |
|
"loss": 1.1048, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 11.121499061584473, |
|
"learning_rate": 3.384599811889766e-10, |
|
"loss": 1.1005, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.992, |
|
"grad_norm": 11.869853973388672, |
|
"learning_rate": 2.1661526775795804e-10, |
|
"loss": 1.1684, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 2.9939999999999998, |
|
"grad_norm": 13.046727180480957, |
|
"learning_rate": 1.2184647302626585e-10, |
|
"loss": 1.0746, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 2.996, |
|
"grad_norm": 12.80275821685791, |
|
"learning_rate": 5.4154110206150465e-11, |
|
"loss": 1.1424, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 2.998, |
|
"grad_norm": 13.787501335144043, |
|
"learning_rate": 1.3538545881042198e-11, |
|
"loss": 1.172, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 11.100687026977539, |
|
"learning_rate": 0.0, |
|
"loss": 1.1813, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1500, |
|
"total_flos": 3.656864303572582e+16, |
|
"train_loss": 0.0, |
|
"train_runtime": 15.8388, |
|
"train_samples_per_second": 9091.618, |
|
"train_steps_per_second": 94.704 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1500, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.656864303572582e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|