{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 846,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0035460992907801418,
      "grad_norm": 14.04724696500921,
      "learning_rate": 0.0,
      "loss": 1.8587,
      "step": 1
    },
    {
      "epoch": 0.0070921985815602835,
      "grad_norm": 14.602415786818106,
      "learning_rate": 2.3529411764705883e-07,
      "loss": 1.9007,
      "step": 2
    },
    {
      "epoch": 0.010638297872340425,
      "grad_norm": 14.76042346081964,
      "learning_rate": 4.7058823529411767e-07,
      "loss": 1.9022,
      "step": 3
    },
    {
      "epoch": 0.014184397163120567,
      "grad_norm": 14.481514822839404,
      "learning_rate": 7.058823529411766e-07,
      "loss": 1.9044,
      "step": 4
    },
    {
      "epoch": 0.01773049645390071,
      "grad_norm": 14.264365275251663,
      "learning_rate": 9.411764705882353e-07,
      "loss": 1.9279,
      "step": 5
    },
    {
      "epoch": 0.02127659574468085,
      "grad_norm": 13.7952134429284,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 1.9164,
      "step": 6
    },
    {
      "epoch": 0.024822695035460994,
      "grad_norm": 14.28102655974937,
      "learning_rate": 1.4117647058823531e-06,
      "loss": 1.9369,
      "step": 7
    },
    {
      "epoch": 0.028368794326241134,
      "grad_norm": 12.30872463822712,
      "learning_rate": 1.6470588235294118e-06,
      "loss": 1.8994,
      "step": 8
    },
    {
      "epoch": 0.031914893617021274,
      "grad_norm": 11.611863485631176,
      "learning_rate": 1.8823529411764707e-06,
      "loss": 1.845,
      "step": 9
    },
    {
      "epoch": 0.03546099290780142,
      "grad_norm": 8.00669314869585,
      "learning_rate": 2.1176470588235296e-06,
      "loss": 1.7459,
      "step": 10
    },
    {
      "epoch": 0.03900709219858156,
      "grad_norm": 7.053422475492129,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 1.6914,
      "step": 11
    },
    {
      "epoch": 0.0425531914893617,
      "grad_norm": 6.82884339451663,
      "learning_rate": 2.5882352941176473e-06,
      "loss": 1.7283,
      "step": 12
    },
    {
      "epoch": 0.04609929078014184,
      "grad_norm": 3.283572533292923,
      "learning_rate": 2.8235294117647062e-06,
      "loss": 1.6065,
      "step": 13
    },
    {
      "epoch": 0.04964539007092199,
      "grad_norm": 2.685692798754663,
      "learning_rate": 3.058823529411765e-06,
      "loss": 1.5898,
      "step": 14
    },
    {
      "epoch": 0.05319148936170213,
      "grad_norm": 2.5132795524411335,
      "learning_rate": 3.2941176470588236e-06,
      "loss": 1.5992,
      "step": 15
    },
    {
      "epoch": 0.05673758865248227,
      "grad_norm": 2.1153617069636295,
      "learning_rate": 3.529411764705883e-06,
      "loss": 1.5119,
      "step": 16
    },
    {
      "epoch": 0.06028368794326241,
      "grad_norm": 2.1115729682872924,
      "learning_rate": 3.7647058823529414e-06,
      "loss": 1.5371,
      "step": 17
    },
    {
      "epoch": 0.06382978723404255,
      "grad_norm": 2.830840809822407,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.525,
      "step": 18
    },
    {
      "epoch": 0.0673758865248227,
      "grad_norm": 3.1825497271706578,
      "learning_rate": 4.235294117647059e-06,
      "loss": 1.5146,
      "step": 19
    },
    {
      "epoch": 0.07092198581560284,
      "grad_norm": 2.934535682603558,
      "learning_rate": 4.4705882352941184e-06,
      "loss": 1.4848,
      "step": 20
    },
    {
      "epoch": 0.07446808510638298,
      "grad_norm": 2.4904059244565486,
      "learning_rate": 4.705882352941177e-06,
      "loss": 1.4281,
      "step": 21
    },
    {
      "epoch": 0.07801418439716312,
      "grad_norm": 1.965660747063884,
      "learning_rate": 4.941176470588236e-06,
      "loss": 1.4339,
      "step": 22
    },
    {
      "epoch": 0.08156028368794327,
      "grad_norm": 1.601702138082789,
      "learning_rate": 5.176470588235295e-06,
      "loss": 1.4012,
      "step": 23
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 1.1765313349167572,
      "learning_rate": 5.411764705882353e-06,
      "loss": 1.3595,
      "step": 24
    },
    {
      "epoch": 0.08865248226950355,
      "grad_norm": 1.181473952846224,
      "learning_rate": 5.6470588235294125e-06,
      "loss": 1.3754,
      "step": 25
    },
    {
      "epoch": 0.09219858156028368,
      "grad_norm": 1.2232305593346886,
      "learning_rate": 5.882352941176471e-06,
      "loss": 1.3597,
      "step": 26
    },
    {
      "epoch": 0.09574468085106383,
      "grad_norm": 1.200373129757842,
      "learning_rate": 6.11764705882353e-06,
      "loss": 1.3325,
      "step": 27
    },
    {
      "epoch": 0.09929078014184398,
      "grad_norm": 0.9974822666519331,
      "learning_rate": 6.352941176470589e-06,
      "loss": 1.303,
      "step": 28
    },
    {
      "epoch": 0.10283687943262411,
      "grad_norm": 0.8261258243562135,
      "learning_rate": 6.588235294117647e-06,
      "loss": 1.3272,
      "step": 29
    },
    {
      "epoch": 0.10638297872340426,
      "grad_norm": 0.6976037915101265,
      "learning_rate": 6.8235294117647065e-06,
      "loss": 1.2773,
      "step": 30
    },
    {
      "epoch": 0.1099290780141844,
      "grad_norm": 0.7026199238348708,
      "learning_rate": 7.058823529411766e-06,
      "loss": 1.2758,
      "step": 31
    },
    {
      "epoch": 0.11347517730496454,
      "grad_norm": 0.680951408768359,
      "learning_rate": 7.294117647058823e-06,
      "loss": 1.264,
      "step": 32
    },
    {
      "epoch": 0.11702127659574468,
      "grad_norm": 0.669311532690681,
      "learning_rate": 7.529411764705883e-06,
      "loss": 1.2209,
      "step": 33
    },
    {
      "epoch": 0.12056737588652482,
      "grad_norm": 0.6005160404767125,
      "learning_rate": 7.764705882352941e-06,
      "loss": 1.1828,
      "step": 34
    },
    {
      "epoch": 0.12411347517730496,
      "grad_norm": 0.7326603813019236,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.2431,
      "step": 35
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 0.5835009779668501,
      "learning_rate": 8.23529411764706e-06,
      "loss": 1.241,
      "step": 36
    },
    {
      "epoch": 0.13120567375886524,
      "grad_norm": 0.5378943901741212,
      "learning_rate": 8.470588235294118e-06,
      "loss": 1.1968,
      "step": 37
    },
    {
      "epoch": 0.1347517730496454,
      "grad_norm": 0.572579019039888,
      "learning_rate": 8.705882352941177e-06,
      "loss": 1.2327,
      "step": 38
    },
    {
      "epoch": 0.13829787234042554,
      "grad_norm": 0.5071056767777127,
      "learning_rate": 8.941176470588237e-06,
      "loss": 1.1909,
      "step": 39
    },
    {
      "epoch": 0.14184397163120568,
      "grad_norm": 0.5076864263245315,
      "learning_rate": 9.176470588235294e-06,
      "loss": 1.1914,
      "step": 40
    },
    {
      "epoch": 0.1453900709219858,
      "grad_norm": 0.4768242650196604,
      "learning_rate": 9.411764705882354e-06,
      "loss": 1.2212,
      "step": 41
    },
    {
      "epoch": 0.14893617021276595,
      "grad_norm": 0.5175086914458062,
      "learning_rate": 9.647058823529412e-06,
      "loss": 1.1533,
      "step": 42
    },
    {
      "epoch": 0.1524822695035461,
      "grad_norm": 0.469141750165397,
      "learning_rate": 9.882352941176472e-06,
      "loss": 1.1962,
      "step": 43
    },
    {
      "epoch": 0.15602836879432624,
      "grad_norm": 0.4713622627510269,
      "learning_rate": 1.011764705882353e-05,
      "loss": 1.202,
      "step": 44
    },
    {
      "epoch": 0.1595744680851064,
      "grad_norm": 0.43318402798290195,
      "learning_rate": 1.035294117647059e-05,
      "loss": 1.1557,
      "step": 45
    },
    {
      "epoch": 0.16312056737588654,
      "grad_norm": 0.4320649172294336,
      "learning_rate": 1.0588235294117648e-05,
      "loss": 1.1794,
      "step": 46
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.4115353237514731,
      "learning_rate": 1.0823529411764706e-05,
      "loss": 1.1802,
      "step": 47
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 0.4298696060251368,
      "learning_rate": 1.1058823529411766e-05,
      "loss": 1.1723,
      "step": 48
    },
    {
      "epoch": 0.17375886524822695,
      "grad_norm": 0.42417079988997497,
      "learning_rate": 1.1294117647058825e-05,
      "loss": 1.1788,
      "step": 49
    },
    {
      "epoch": 0.1773049645390071,
      "grad_norm": 0.4067328480323003,
      "learning_rate": 1.1529411764705882e-05,
      "loss": 1.1431,
      "step": 50
    },
    {
      "epoch": 0.18085106382978725,
      "grad_norm": 0.3874360377877721,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 1.1142,
      "step": 51
    },
    {
      "epoch": 0.18439716312056736,
      "grad_norm": 0.4675215039516683,
      "learning_rate": 1.2e-05,
      "loss": 1.1023,
      "step": 52
    },
    {
      "epoch": 0.1879432624113475,
      "grad_norm": 0.4205563606474083,
      "learning_rate": 1.223529411764706e-05,
      "loss": 1.1854,
      "step": 53
    },
    {
      "epoch": 0.19148936170212766,
      "grad_norm": 0.41018245492071165,
      "learning_rate": 1.2470588235294119e-05,
      "loss": 1.1658,
      "step": 54
    },
    {
      "epoch": 0.1950354609929078,
      "grad_norm": 0.3620344825134141,
      "learning_rate": 1.2705882352941177e-05,
      "loss": 1.1242,
      "step": 55
    },
    {
      "epoch": 0.19858156028368795,
      "grad_norm": 0.40939886980642687,
      "learning_rate": 1.2941176470588238e-05,
      "loss": 1.1287,
      "step": 56
    },
    {
      "epoch": 0.20212765957446807,
      "grad_norm": 0.39840938018772826,
      "learning_rate": 1.3176470588235294e-05,
      "loss": 1.1755,
      "step": 57
    },
    {
      "epoch": 0.20567375886524822,
      "grad_norm": 0.3972864117185851,
      "learning_rate": 1.3411764705882353e-05,
      "loss": 1.1524,
      "step": 58
    },
    {
      "epoch": 0.20921985815602837,
      "grad_norm": 0.35963503957203713,
      "learning_rate": 1.3647058823529413e-05,
      "loss": 1.1172,
      "step": 59
    },
    {
      "epoch": 0.2127659574468085,
      "grad_norm": 0.3934205934197654,
      "learning_rate": 1.3882352941176471e-05,
      "loss": 1.099,
      "step": 60
    },
    {
      "epoch": 0.21631205673758866,
      "grad_norm": 0.36473534875394586,
      "learning_rate": 1.4117647058823532e-05,
      "loss": 1.094,
      "step": 61
    },
    {
      "epoch": 0.2198581560283688,
      "grad_norm": 0.38310616016878557,
      "learning_rate": 1.435294117647059e-05,
      "loss": 1.1315,
      "step": 62
    },
    {
      "epoch": 0.22340425531914893,
      "grad_norm": 0.38310118046511205,
      "learning_rate": 1.4588235294117647e-05,
      "loss": 1.0979,
      "step": 63
    },
    {
      "epoch": 0.22695035460992907,
      "grad_norm": 0.40071709534499794,
      "learning_rate": 1.4823529411764707e-05,
      "loss": 1.1292,
      "step": 64
    },
    {
      "epoch": 0.23049645390070922,
      "grad_norm": 0.40343417157026384,
      "learning_rate": 1.5058823529411765e-05,
      "loss": 1.1497,
      "step": 65
    },
    {
      "epoch": 0.23404255319148937,
      "grad_norm": 0.38981040775595505,
      "learning_rate": 1.5294117647058822e-05,
      "loss": 1.0976,
      "step": 66
    },
    {
      "epoch": 0.2375886524822695,
      "grad_norm": 0.4844512170653719,
      "learning_rate": 1.5529411764705882e-05,
      "loss": 1.129,
      "step": 67
    },
    {
      "epoch": 0.24113475177304963,
      "grad_norm": 0.3730642827040671,
      "learning_rate": 1.5764705882352943e-05,
      "loss": 1.0611,
      "step": 68
    },
    {
      "epoch": 0.24468085106382978,
      "grad_norm": 0.38672469429859196,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.0984,
      "step": 69
    },
    {
      "epoch": 0.24822695035460993,
      "grad_norm": 0.4027051220542017,
      "learning_rate": 1.623529411764706e-05,
      "loss": 1.0971,
      "step": 70
    },
    {
      "epoch": 0.25177304964539005,
      "grad_norm": 0.38792933242967387,
      "learning_rate": 1.647058823529412e-05,
      "loss": 1.1201,
      "step": 71
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 0.40861480699220193,
      "learning_rate": 1.670588235294118e-05,
      "loss": 1.1313,
      "step": 72
    },
    {
      "epoch": 0.25886524822695034,
      "grad_norm": 0.3649020493754554,
      "learning_rate": 1.6941176470588237e-05,
      "loss": 1.0918,
      "step": 73
    },
    {
      "epoch": 0.2624113475177305,
      "grad_norm": 0.38869825009731374,
      "learning_rate": 1.7176470588235293e-05,
      "loss": 1.0971,
      "step": 74
    },
    {
      "epoch": 0.26595744680851063,
      "grad_norm": 0.3869921912718602,
      "learning_rate": 1.7411764705882353e-05,
      "loss": 1.0756,
      "step": 75
    },
    {
      "epoch": 0.2695035460992908,
      "grad_norm": 0.37677316889298695,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 1.0792,
      "step": 76
    },
    {
      "epoch": 0.2730496453900709,
      "grad_norm": 0.36310119840141764,
      "learning_rate": 1.7882352941176474e-05,
      "loss": 1.095,
      "step": 77
    },
    {
      "epoch": 0.2765957446808511,
      "grad_norm": 0.3817251391796483,
      "learning_rate": 1.811764705882353e-05,
      "loss": 1.1147,
      "step": 78
    },
    {
      "epoch": 0.2801418439716312,
      "grad_norm": 0.38683476125531163,
      "learning_rate": 1.8352941176470587e-05,
      "loss": 1.1098,
      "step": 79
    },
    {
      "epoch": 0.28368794326241137,
      "grad_norm": 0.36958806331472616,
      "learning_rate": 1.8588235294117647e-05,
      "loss": 1.0726,
      "step": 80
    },
    {
      "epoch": 0.2872340425531915,
      "grad_norm": 0.39165579816640134,
      "learning_rate": 1.8823529411764708e-05,
      "loss": 1.0831,
      "step": 81
    },
    {
      "epoch": 0.2907801418439716,
      "grad_norm": 0.4058344969874132,
      "learning_rate": 1.9058823529411764e-05,
      "loss": 1.0735,
      "step": 82
    },
    {
      "epoch": 0.29432624113475175,
      "grad_norm": 0.4030952561691915,
      "learning_rate": 1.9294117647058825e-05,
      "loss": 1.0924,
      "step": 83
    },
    {
      "epoch": 0.2978723404255319,
      "grad_norm": 0.42274227026304756,
      "learning_rate": 1.9529411764705885e-05,
      "loss": 1.1099,
      "step": 84
    },
    {
      "epoch": 0.30141843971631205,
      "grad_norm": 0.40979457721603246,
      "learning_rate": 1.9764705882352945e-05,
      "loss": 1.0618,
      "step": 85
    },
    {
      "epoch": 0.3049645390070922,
      "grad_norm": 0.385822581453486,
      "learning_rate": 2e-05,
      "loss": 1.0432,
      "step": 86
    },
    {
      "epoch": 0.30851063829787234,
      "grad_norm": 0.4283573534577983,
      "learning_rate": 1.99999147881843e-05,
      "loss": 1.0875,
      "step": 87
    },
    {
      "epoch": 0.3120567375886525,
      "grad_norm": 0.41563178697289727,
      "learning_rate": 1.9999659154189404e-05,
      "loss": 1.0805,
      "step": 88
    },
    {
      "epoch": 0.31560283687943264,
      "grad_norm": 0.3899019912130811,
      "learning_rate": 1.9999233102371918e-05,
      "loss": 1.0768,
      "step": 89
    },
    {
      "epoch": 0.3191489361702128,
      "grad_norm": 0.3992722195667444,
      "learning_rate": 1.9998636639992776e-05,
      "loss": 1.1252,
      "step": 90
    },
    {
      "epoch": 0.32269503546099293,
      "grad_norm": 0.41052367607056267,
      "learning_rate": 1.9997869777217106e-05,
      "loss": 1.069,
      "step": 91
    },
    {
      "epoch": 0.3262411347517731,
      "grad_norm": 0.41728106430532,
      "learning_rate": 1.9996932527114064e-05,
      "loss": 1.0592,
      "step": 92
    },
    {
      "epoch": 0.32978723404255317,
      "grad_norm": 0.3912128813820819,
      "learning_rate": 1.99958249056566e-05,
      "loss": 1.0442,
      "step": 93
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.40709314071102765,
      "learning_rate": 1.9994546931721204e-05,
      "loss": 1.1249,
      "step": 94
    },
    {
      "epoch": 0.33687943262411346,
      "grad_norm": 0.4223398268189614,
      "learning_rate": 1.9993098627087576e-05,
      "loss": 1.0949,
      "step": 95
    },
    {
      "epoch": 0.3404255319148936,
      "grad_norm": 0.44711495627147035,
      "learning_rate": 1.999148001643824e-05,
      "loss": 1.0807,
      "step": 96
    },
    {
      "epoch": 0.34397163120567376,
      "grad_norm": 0.39883160757012304,
      "learning_rate": 1.998969112735816e-05,
      "loss": 1.1175,
      "step": 97
    },
    {
      "epoch": 0.3475177304964539,
      "grad_norm": 0.4124247088388006,
      "learning_rate": 1.9987731990334225e-05,
      "loss": 1.0469,
      "step": 98
    },
    {
      "epoch": 0.35106382978723405,
      "grad_norm": 0.8676420162826889,
      "learning_rate": 1.9985602638754758e-05,
      "loss": 1.0547,
      "step": 99
    },
    {
      "epoch": 0.3546099290780142,
      "grad_norm": 0.4539645796907816,
      "learning_rate": 1.9983303108908946e-05,
      "loss": 1.071,
      "step": 100
    },
    {
      "epoch": 0.35815602836879434,
      "grad_norm": 0.4103701258745225,
      "learning_rate": 1.998083343998621e-05,
      "loss": 1.0597,
      "step": 101
    },
    {
      "epoch": 0.3617021276595745,
      "grad_norm": 0.38720212977229745,
      "learning_rate": 1.9978193674075547e-05,
      "loss": 1.052,
      "step": 102
    },
    {
      "epoch": 0.36524822695035464,
      "grad_norm": 0.41572032365047884,
      "learning_rate": 1.9975383856164798e-05,
      "loss": 1.0686,
      "step": 103
    },
    {
      "epoch": 0.36879432624113473,
      "grad_norm": 0.4082614940841916,
      "learning_rate": 1.9972404034139913e-05,
      "loss": 1.0855,
      "step": 104
    },
    {
      "epoch": 0.3723404255319149,
      "grad_norm": 0.37549114805028977,
      "learning_rate": 1.996925425878409e-05,
      "loss": 1.047,
      "step": 105
    },
    {
      "epoch": 0.375886524822695,
      "grad_norm": 0.4610243331143955,
      "learning_rate": 1.9965934583776948e-05,
      "loss": 1.0636,
      "step": 106
    },
    {
      "epoch": 0.37943262411347517,
      "grad_norm": 0.44503123216260176,
      "learning_rate": 1.99624450656936e-05,
      "loss": 1.0526,
      "step": 107
    },
    {
      "epoch": 0.3829787234042553,
      "grad_norm": 0.4149811610983327,
      "learning_rate": 1.995878576400367e-05,
      "loss": 1.0076,
      "step": 108
    },
    {
      "epoch": 0.38652482269503546,
      "grad_norm": 0.3918182369922544,
      "learning_rate": 1.9954956741070312e-05,
      "loss": 1.0797,
      "step": 109
    },
    {
      "epoch": 0.3900709219858156,
      "grad_norm": 0.435027379258559,
      "learning_rate": 1.9950958062149126e-05,
      "loss": 1.0864,
      "step": 110
    },
    {
      "epoch": 0.39361702127659576,
      "grad_norm": 0.3879332371338635,
      "learning_rate": 1.9946789795387048e-05,
      "loss": 1.0949,
      "step": 111
    },
    {
      "epoch": 0.3971631205673759,
      "grad_norm": 0.4109671535287323,
      "learning_rate": 1.9942452011821195e-05,
      "loss": 1.0456,
      "step": 112
    },
    {
      "epoch": 0.40070921985815605,
      "grad_norm": 0.39837338883362244,
      "learning_rate": 1.993794478537765e-05,
      "loss": 1.0678,
      "step": 113
    },
    {
      "epoch": 0.40425531914893614,
      "grad_norm": 0.48242943738691013,
      "learning_rate": 1.9933268192870202e-05,
      "loss": 1.0972,
      "step": 114
    },
    {
      "epoch": 0.4078014184397163,
      "grad_norm": 0.4147047600582376,
      "learning_rate": 1.9928422313999038e-05,
      "loss": 1.0471,
      "step": 115
    },
    {
      "epoch": 0.41134751773049644,
      "grad_norm": 0.4185632224192961,
      "learning_rate": 1.9923407231349386e-05,
      "loss": 1.0918,
      "step": 116
    },
    {
      "epoch": 0.4148936170212766,
      "grad_norm": 0.41380413853065684,
      "learning_rate": 1.991822303039011e-05,
      "loss": 1.0642,
      "step": 117
    },
    {
      "epoch": 0.41843971631205673,
      "grad_norm": 0.40984905075653666,
      "learning_rate": 1.991286979947224e-05,
      "loss": 1.0911,
      "step": 118
    },
    {
      "epoch": 0.4219858156028369,
      "grad_norm": 0.4346507481006875,
      "learning_rate": 1.990734762982748e-05,
      "loss": 1.0524,
      "step": 119
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.3802797846709125,
      "learning_rate": 1.9901656615566655e-05,
      "loss": 1.0371,
      "step": 120
    },
    {
      "epoch": 0.42907801418439717,
      "grad_norm": 0.43994501408678205,
      "learning_rate": 1.9895796853678096e-05,
      "loss": 1.0402,
      "step": 121
    },
    {
      "epoch": 0.4326241134751773,
      "grad_norm": 0.47314319261278487,
      "learning_rate": 1.988976844402599e-05,
      "loss": 1.0624,
      "step": 122
    },
    {
      "epoch": 0.43617021276595747,
      "grad_norm": 0.43887512296501896,
      "learning_rate": 1.988357148934869e-05,
      "loss": 1.0642,
      "step": 123
    },
    {
      "epoch": 0.4397163120567376,
      "grad_norm": 0.4128076475964722,
      "learning_rate": 1.9877206095256935e-05,
      "loss": 1.0363,
      "step": 124
    },
    {
      "epoch": 0.4432624113475177,
      "grad_norm": 0.4348332361717801,
      "learning_rate": 1.9870672370232097e-05,
      "loss": 1.0219,
      "step": 125
    },
    {
      "epoch": 0.44680851063829785,
      "grad_norm": 0.4035353186595829,
      "learning_rate": 1.986397042562428e-05,
      "loss": 1.0277,
      "step": 126
    },
    {
      "epoch": 0.450354609929078,
      "grad_norm": 0.4054498285955409,
      "learning_rate": 1.985710037565047e-05,
      "loss": 1.0883,
      "step": 127
    },
    {
      "epoch": 0.45390070921985815,
      "grad_norm": 0.39687366636112803,
      "learning_rate": 1.9850062337392536e-05,
      "loss": 1.0607,
      "step": 128
    },
    {
      "epoch": 0.4574468085106383,
      "grad_norm": 0.4405171428453929,
      "learning_rate": 1.9842856430795298e-05,
      "loss": 1.0564,
      "step": 129
    },
    {
      "epoch": 0.46099290780141844,
      "grad_norm": 0.41170770013631397,
      "learning_rate": 1.9835482778664426e-05,
      "loss": 1.0311,
      "step": 130
    },
    {
      "epoch": 0.4645390070921986,
      "grad_norm": 0.39274848149852354,
      "learning_rate": 1.9827941506664378e-05,
      "loss": 1.0626,
      "step": 131
    },
    {
      "epoch": 0.46808510638297873,
      "grad_norm": 0.43188276490314575,
      "learning_rate": 1.9820232743316244e-05,
      "loss": 1.0597,
      "step": 132
    },
    {
      "epoch": 0.4716312056737589,
      "grad_norm": 0.4032899104523295,
      "learning_rate": 1.981235661999558e-05,
      "loss": 1.06,
      "step": 133
    },
    {
      "epoch": 0.475177304964539,
      "grad_norm": 0.45040621241859885,
      "learning_rate": 1.9804313270930134e-05,
      "loss": 1.078,
      "step": 134
    },
    {
      "epoch": 0.4787234042553192,
      "grad_norm": 0.38105861324144824,
      "learning_rate": 1.979610283319758e-05,
      "loss": 1.0653,
      "step": 135
    },
    {
      "epoch": 0.48226950354609927,
      "grad_norm": 0.4048312669954708,
      "learning_rate": 1.978772544672318e-05,
      "loss": 1.0469,
      "step": 136
    },
    {
      "epoch": 0.4858156028368794,
      "grad_norm": 0.39934964581590104,
      "learning_rate": 1.9779181254277397e-05,
      "loss": 1.0482,
      "step": 137
    },
    {
      "epoch": 0.48936170212765956,
      "grad_norm": 0.4297490738832722,
      "learning_rate": 1.9770470401473462e-05,
      "loss": 1.0751,
      "step": 138
    },
    {
      "epoch": 0.4929078014184397,
      "grad_norm": 0.3732722270246798,
      "learning_rate": 1.9761593036764894e-05,
      "loss": 1.02,
      "step": 139
    },
    {
      "epoch": 0.49645390070921985,
      "grad_norm": 0.40981082693027104,
      "learning_rate": 1.975254931144296e-05,
      "loss": 1.0506,
      "step": 140
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.3811499359219313,
      "learning_rate": 1.9743339379634117e-05,
      "loss": 1.0495,
      "step": 141
    },
    {
      "epoch": 0.5035460992907801,
      "grad_norm": 0.38491189378621854,
      "learning_rate": 1.9733963398297365e-05,
      "loss": 1.0351,
      "step": 142
    },
    {
      "epoch": 0.5070921985815603,
      "grad_norm": 0.4247342696761821,
      "learning_rate": 1.972442152722158e-05,
      "loss": 1.04,
      "step": 143
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 0.41437878465719885,
      "learning_rate": 1.97147139290228e-05,
      "loss": 1.0916,
      "step": 144
    },
    {
      "epoch": 0.5141843971631206,
      "grad_norm": 0.43538254663755344,
      "learning_rate": 1.9704840769141435e-05,
      "loss": 1.0462,
      "step": 145
    },
    {
      "epoch": 0.5177304964539007,
      "grad_norm": 0.4133957050527475,
      "learning_rate": 1.969480221583946e-05,
      "loss": 1.0684,
      "step": 146
    },
    {
      "epoch": 0.5212765957446809,
      "grad_norm": 0.4321179837333326,
      "learning_rate": 1.968459844019755e-05,
      "loss": 1.0537,
      "step": 147
    },
    {
      "epoch": 0.524822695035461,
      "grad_norm": 0.4199126244655049,
      "learning_rate": 1.9674229616112147e-05,
      "loss": 1.0694,
      "step": 148
    },
    {
      "epoch": 0.5283687943262412,
      "grad_norm": 0.41968308383381936,
      "learning_rate": 1.9663695920292528e-05,
      "loss": 1.0374,
      "step": 149
    },
    {
      "epoch": 0.5319148936170213,
      "grad_norm": 0.3937539527163739,
      "learning_rate": 1.9652997532257752e-05,
      "loss": 1.0097,
      "step": 150
    },
    {
      "epoch": 0.5354609929078015,
      "grad_norm": 0.4151219034363994,
      "learning_rate": 1.964213463433364e-05,
      "loss": 1.0286,
      "step": 151
    },
    {
      "epoch": 0.5390070921985816,
      "grad_norm": 0.4040400586893122,
      "learning_rate": 1.9631107411649632e-05,
      "loss": 1.0483,
      "step": 152
    },
    {
      "epoch": 0.5425531914893617,
      "grad_norm": 0.3716515261787462,
      "learning_rate": 1.9619916052135676e-05,
      "loss": 1.054,
      "step": 153
    },
    {
      "epoch": 0.5460992907801419,
      "grad_norm": 0.4650548851701862,
      "learning_rate": 1.9608560746518977e-05,
      "loss": 1.0405,
      "step": 154
    },
    {
      "epoch": 0.549645390070922,
      "grad_norm": 0.44014703147533524,
      "learning_rate": 1.959704168832078e-05,
      "loss": 1.065,
      "step": 155
    },
    {
      "epoch": 0.5531914893617021,
      "grad_norm": 0.4012529413831958,
      "learning_rate": 1.9585359073853052e-05,
      "loss": 1.0896,
      "step": 156
    },
    {
      "epoch": 0.5567375886524822,
      "grad_norm": 0.4252692558854423,
      "learning_rate": 1.957351310221516e-05,
      "loss": 1.0305,
      "step": 157
    },
    {
      "epoch": 0.5602836879432624,
      "grad_norm": 0.41637802948134767,
      "learning_rate": 1.9561503975290446e-05,
      "loss": 1.0678,
      "step": 158
    },
    {
      "epoch": 0.5638297872340425,
      "grad_norm": 0.41613274402508954,
      "learning_rate": 1.954933189774282e-05,
      "loss": 1.0199,
      "step": 159
    },
    {
      "epoch": 0.5673758865248227,
      "grad_norm": 0.39382583802044363,
      "learning_rate": 1.9536997077013236e-05,
      "loss": 1.0495,
      "step": 160
    },
    {
      "epoch": 0.5709219858156028,
      "grad_norm": 0.388499486904763,
      "learning_rate": 1.95244997233162e-05,
      "loss": 1.0014,
      "step": 161
    },
    {
      "epoch": 0.574468085106383,
      "grad_norm": 0.38512135545026266,
      "learning_rate": 1.951184004963615e-05,
      "loss": 1.0718,
      "step": 162
    },
    {
      "epoch": 0.5780141843971631,
      "grad_norm": 0.3783281093275966,
      "learning_rate": 1.9499018271723836e-05,
      "loss": 1.0206,
      "step": 163
    },
    {
      "epoch": 0.5815602836879432,
      "grad_norm": 0.43369685190384033,
      "learning_rate": 1.9486034608092657e-05,
      "loss": 1.0498,
      "step": 164
    },
    {
      "epoch": 0.5851063829787234,
      "grad_norm": 0.3987508299716515,
      "learning_rate": 1.9472889280014924e-05,
      "loss": 1.0351,
      "step": 165
    },
    {
      "epoch": 0.5886524822695035,
      "grad_norm": 0.4100807327115829,
      "learning_rate": 1.945958251151809e-05,
      "loss": 1.0499,
      "step": 166
    },
    {
      "epoch": 0.5921985815602837,
      "grad_norm": 0.4256425875406832,
      "learning_rate": 1.944611452938094e-05,
      "loss": 1.0487,
      "step": 167
    },
    {
      "epoch": 0.5957446808510638,
      "grad_norm": 0.44803839464658235,
      "learning_rate": 1.943248556312971e-05,
      "loss": 0.9881,
      "step": 168
    },
    {
      "epoch": 0.599290780141844,
      "grad_norm": 0.40259897757633056,
      "learning_rate": 1.9418695845034197e-05,
      "loss": 1.0168,
      "step": 169
    },
    {
      "epoch": 0.6028368794326241,
      "grad_norm": 0.42470303559371697,
      "learning_rate": 1.9404745610103785e-05,
      "loss": 1.0281,
      "step": 170
    },
    {
      "epoch": 0.6063829787234043,
      "grad_norm": 0.41403135143146674,
      "learning_rate": 1.9390635096083443e-05,
      "loss": 1.0543,
      "step": 171
    },
    {
      "epoch": 0.6099290780141844,
      "grad_norm": 0.4204328398131905,
      "learning_rate": 1.937636454344967e-05,
      "loss": 1.0365,
      "step": 172
    },
    {
      "epoch": 0.6134751773049646,
      "grad_norm": 0.46115860711121315,
      "learning_rate": 1.936193419540641e-05,
      "loss": 1.0458,
      "step": 173
    },
    {
      "epoch": 0.6170212765957447,
      "grad_norm": 0.4378596032908916,
      "learning_rate": 1.9347344297880892e-05,
      "loss": 1.0556,
      "step": 174
    },
    {
      "epoch": 0.6205673758865248,
      "grad_norm": 0.5113254010648783,
      "learning_rate": 1.9332595099519454e-05,
      "loss": 1.0638,
      "step": 175
    },
    {
      "epoch": 0.624113475177305,
      "grad_norm": 0.4512162294917552,
      "learning_rate": 1.9317686851683285e-05,
      "loss": 1.0662,
      "step": 176
    },
    {
      "epoch": 0.6276595744680851,
      "grad_norm": 0.4304702959646727,
      "learning_rate": 1.9302619808444158e-05,
      "loss": 1.0468,
      "step": 177
    },
    {
      "epoch": 0.6312056737588653,
      "grad_norm": 0.4719059649772538,
      "learning_rate": 1.9287394226580102e-05,
      "loss": 1.0701,
      "step": 178
    },
    {
      "epoch": 0.6347517730496454,
      "grad_norm": 0.3734601257160578,
      "learning_rate": 1.9272010365571002e-05,
      "loss": 1.0236,
      "step": 179
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 0.4651134237014968,
      "learning_rate": 1.9256468487594215e-05,
      "loss": 1.0541,
      "step": 180
    },
    {
      "epoch": 0.6418439716312057,
      "grad_norm": 0.4042541523513242,
      "learning_rate": 1.9240768857520062e-05,
      "loss": 1.0466,
      "step": 181
    },
    {
      "epoch": 0.6453900709219859,
      "grad_norm": 0.4243886495965767,
      "learning_rate": 1.922491174290734e-05,
      "loss": 1.0424,
      "step": 182
    },
    {
      "epoch": 0.648936170212766,
      "grad_norm": 0.40111191394993834,
      "learning_rate": 1.9208897413998758e-05,
      "loss": 1.0218,
      "step": 183
    },
    {
      "epoch": 0.6524822695035462,
      "grad_norm": 0.3940895377839162,
      "learning_rate": 1.9192726143716322e-05,
      "loss": 1.0594,
      "step": 184
    },
    {
      "epoch": 0.6560283687943262,
      "grad_norm": 0.42271957323204373,
      "learning_rate": 1.9176398207656693e-05,
      "loss": 1.0432,
      "step": 185
    },
    {
      "epoch": 0.6595744680851063,
      "grad_norm": 0.37986382559991433,
      "learning_rate": 1.9159913884086486e-05,
      "loss": 1.0228,
      "step": 186
    },
    {
      "epoch": 0.6631205673758865,
      "grad_norm": 0.4074315975311201,
      "learning_rate": 1.9143273453937533e-05,
      "loss": 1.0569,
      "step": 187
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.40463938046019843,
      "learning_rate": 1.9126477200802084e-05,
      "loss": 1.0295,
      "step": 188
    },
    {
      "epoch": 0.6702127659574468,
      "grad_norm": 0.4497742000064388,
      "learning_rate": 1.9109525410927988e-05,
      "loss": 1.0472,
      "step": 189
    },
    {
      "epoch": 0.6737588652482269,
      "grad_norm": 0.4079176238827453,
      "learning_rate": 1.9092418373213795e-05,
      "loss": 1.0033,
      "step": 190
    },
    {
      "epoch": 0.6773049645390071,
      "grad_norm": 0.38168468355602525,
      "learning_rate": 1.9075156379203867e-05,
      "loss": 1.018,
      "step": 191
    },
    {
      "epoch": 0.6808510638297872,
      "grad_norm": 0.4270641992780327,
      "learning_rate": 1.9057739723083366e-05,
      "loss": 1.0285,
      "step": 192
    },
    {
      "epoch": 0.6843971631205674,
      "grad_norm": 0.4468028496736141,
      "learning_rate": 1.904016870167327e-05,
      "loss": 1.0169,
      "step": 193
    },
    {
      "epoch": 0.6879432624113475,
      "grad_norm": 0.388728811354985,
      "learning_rate": 1.902244361442531e-05,
      "loss": 1.0109,
      "step": 194
    },
    {
      "epoch": 0.6914893617021277,
      "grad_norm": 0.4366235722900726,
      "learning_rate": 1.9004564763416856e-05,
      "loss": 1.0279,
      "step": 195
    },
    {
      "epoch": 0.6950354609929078,
      "grad_norm": 0.4005450474464167,
      "learning_rate": 1.8986532453345785e-05,
      "loss": 1.0614,
      "step": 196
    },
    {
      "epoch": 0.6985815602836879,
      "grad_norm": 0.4283032818637797,
      "learning_rate": 1.8968346991525267e-05,
      "loss": 0.9702,
      "step": 197
    },
    {
      "epoch": 0.7021276595744681,
      "grad_norm": 0.4149454329152974,
      "learning_rate": 1.895000868787855e-05,
      "loss": 1.0506,
      "step": 198
    },
    {
      "epoch": 0.7056737588652482,
      "grad_norm": 0.3908431105870882,
      "learning_rate": 1.8931517854933664e-05,
      "loss": 1.0352,
      "step": 199
    },
    {
      "epoch": 0.7092198581560284,
      "grad_norm": 0.4122069231448336,
      "learning_rate": 1.89128748078181e-05,
      "loss": 1.0082,
      "step": 200
    },
    {
      "epoch": 0.7127659574468085,
      "grad_norm": 0.39206516921546697,
      "learning_rate": 1.8894079864253434e-05,
      "loss": 1.0089,
      "step": 201
    },
    {
      "epoch": 0.7163120567375887,
      "grad_norm": 0.4017261077067384,
      "learning_rate": 1.887513334454992e-05,
      "loss": 1.0385,
      "step": 202
    },
    {
      "epoch": 0.7198581560283688,
      "grad_norm": 0.4144866828019059,
      "learning_rate": 1.8856035571601027e-05,
      "loss": 1.012,
      "step": 203
    },
    {
      "epoch": 0.723404255319149,
      "grad_norm": 0.4476529046794145,
      "learning_rate": 1.8836786870877942e-05,
      "loss": 1.0259,
      "step": 204
    },
    {
      "epoch": 0.7269503546099291,
      "grad_norm": 0.4139864974336738,
      "learning_rate": 1.8817387570424003e-05,
      "loss": 1.0546,
      "step": 205
    },
    {
      "epoch": 0.7304964539007093,
      "grad_norm": 0.41744847245347383,
      "learning_rate": 1.879783800084914e-05,
      "loss": 1.0575,
      "step": 206
    },
    {
      "epoch": 0.7340425531914894,
      "grad_norm": 0.41092263512752675,
      "learning_rate": 1.8778138495324213e-05,
      "loss": 0.9908,
      "step": 207
    },
    {
      "epoch": 0.7375886524822695,
      "grad_norm": 0.4699649880193204,
      "learning_rate": 1.8758289389575355e-05,
      "loss": 1.0347,
      "step": 208
    },
    {
      "epoch": 0.7411347517730497,
      "grad_norm": 0.43956419673006875,
      "learning_rate": 1.873829102187823e-05,
      "loss": 1.0577,
      "step": 209
    },
    {
      "epoch": 0.7446808510638298,
      "grad_norm": 0.42241045409865247,
      "learning_rate": 1.8718143733052278e-05,
      "loss": 1.0152,
      "step": 210
    },
    {
      "epoch": 0.74822695035461,
      "grad_norm": 0.43120791636280065,
      "learning_rate": 1.8697847866454922e-05,
      "loss": 0.9924,
      "step": 211
    },
    {
      "epoch": 0.75177304964539,
      "grad_norm": 0.4347253404784015,
      "learning_rate": 1.867740376797568e-05,
      "loss": 1.008,
      "step": 212
    },
    {
      "epoch": 0.7553191489361702,
      "grad_norm": 0.4312933323093618,
      "learning_rate": 1.865681178603031e-05,
      "loss": 0.9941,
      "step": 213
    },
    {
      "epoch": 0.7588652482269503,
      "grad_norm": 0.4114365381511061,
      "learning_rate": 1.8636072271554842e-05,
      "loss": 1.0626,
      "step": 214
    },
    {
      "epoch": 0.7624113475177305,
      "grad_norm": 0.4629217798127864,
      "learning_rate": 1.8615185577999616e-05,
      "loss": 1.0296,
      "step": 215
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.44094993883878913,
      "learning_rate": 1.8594152061323245e-05,
      "loss": 1.0399,
      "step": 216
    },
    {
      "epoch": 0.7695035460992907,
      "grad_norm": 0.415176959865907,
      "learning_rate": 1.857297207998656e-05,
      "loss": 1.035,
      "step": 217
    },
    {
      "epoch": 0.7730496453900709,
      "grad_norm": 0.44266355517212597,
      "learning_rate": 1.85516459949465e-05,
      "loss": 1.1049,
      "step": 218
    },
    {
      "epoch": 0.776595744680851,
      "grad_norm": 0.44043411082498807,
      "learning_rate": 1.853017416964994e-05,
      "loss": 1.015,
      "step": 219
    },
    {
      "epoch": 0.7801418439716312,
      "grad_norm": 0.4033941099550602,
      "learning_rate": 1.850855697002753e-05,
      "loss": 1.0421,
      "step": 220
    },
    {
      "epoch": 0.7836879432624113,
      "grad_norm": 0.43450208494361897,
      "learning_rate": 1.8486794764487436e-05,
      "loss": 1.0474,
      "step": 221
    },
    {
      "epoch": 0.7872340425531915,
      "grad_norm": 0.6137072500977133,
      "learning_rate": 1.8464887923909062e-05,
      "loss": 1.0659,
      "step": 222
    },
    {
      "epoch": 0.7907801418439716,
      "grad_norm": 0.42816908651382335,
      "learning_rate": 1.844283682163675e-05,
      "loss": 1.0588,
      "step": 223
    },
    {
      "epoch": 0.7943262411347518,
      "grad_norm": 0.3865343363054826,
      "learning_rate": 1.8420641833473386e-05,
      "loss": 1.0228,
      "step": 224
    },
    {
      "epoch": 0.7978723404255319,
      "grad_norm": 0.42101602612274497,
      "learning_rate": 1.839830333767402e-05,
      "loss": 0.9757,
      "step": 225
    },
    {
      "epoch": 0.8014184397163121,
      "grad_norm": 0.40910650357636097,
      "learning_rate": 1.8375821714939408e-05,
      "loss": 1.0464,
      "step": 226
    },
    {
      "epoch": 0.8049645390070922,
      "grad_norm": 0.42468493209973357,
      "learning_rate": 1.835319734840953e-05,
      "loss": 1.0311,
      "step": 227
    },
    {
      "epoch": 0.8085106382978723,
      "grad_norm": 0.38532684525673616,
      "learning_rate": 1.833043062365706e-05,
      "loss": 1.0266,
      "step": 228
    },
    {
      "epoch": 0.8120567375886525,
      "grad_norm": 0.4203249202064101,
      "learning_rate": 1.8307521928680782e-05,
      "loss": 1.033,
      "step": 229
    },
    {
      "epoch": 0.8156028368794326,
      "grad_norm": 0.4110077616808033,
      "learning_rate": 1.8284471653898995e-05,
      "loss": 1.0488,
      "step": 230
    },
    {
      "epoch": 0.8191489361702128,
      "grad_norm": 0.4088394328094259,
      "learning_rate": 1.8261280192142857e-05,
      "loss": 0.9693,
      "step": 231
    },
    {
      "epoch": 0.8226950354609929,
      "grad_norm": 0.3999605154188856,
      "learning_rate": 1.823794793864968e-05,
      "loss": 1.0256,
      "step": 232
    },
    {
      "epoch": 0.8262411347517731,
      "grad_norm": 0.4089862024662302,
      "learning_rate": 1.82144752910562e-05,
      "loss": 1.0152,
      "step": 233
    },
    {
      "epoch": 0.8297872340425532,
      "grad_norm": 0.3889593277582271,
      "learning_rate": 1.81908626493918e-05,
      "loss": 1.003,
      "step": 234
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.41794204162797643,
      "learning_rate": 1.8167110416071696e-05,
      "loss": 1.0425,
      "step": 235
    },
    {
      "epoch": 0.8368794326241135,
      "grad_norm": 0.3952447291799417,
      "learning_rate": 1.8143218995890072e-05,
      "loss": 1.0034,
      "step": 236
    },
    {
      "epoch": 0.8404255319148937,
      "grad_norm": 0.45317681752619565,
      "learning_rate": 1.8119188796013186e-05,
      "loss": 1.0461,
      "step": 237
    },
    {
      "epoch": 0.8439716312056738,
      "grad_norm": 0.3711961454153343,
      "learning_rate": 1.8095020225972436e-05,
      "loss": 1.0439,
      "step": 238
    },
    {
      "epoch": 0.8475177304964538,
      "grad_norm": 0.4755874761259998,
      "learning_rate": 1.807071369765736e-05,
      "loss": 1.0436,
      "step": 239
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.4196318651413355,
      "learning_rate": 1.804626962530865e-05,
      "loss": 1.0486,
      "step": 240
    },
    {
      "epoch": 0.8546099290780141,
      "grad_norm": 0.4125254595290099,
      "learning_rate": 1.8021688425511058e-05,
      "loss": 1.0293,
      "step": 241
    },
    {
      "epoch": 0.8581560283687943,
      "grad_norm": 0.39729235078930125,
      "learning_rate": 1.7996970517186315e-05,
      "loss": 0.9931,
      "step": 242
    },
    {
      "epoch": 0.8617021276595744,
      "grad_norm": 0.40981989014199277,
      "learning_rate": 1.7972116321585997e-05,
      "loss": 1.0016,
      "step": 243
    },
    {
      "epoch": 0.8652482269503546,
      "grad_norm": 0.3941668554247862,
      "learning_rate": 1.794712626228433e-05,
      "loss": 1.0114,
      "step": 244
    },
    {
      "epoch": 0.8687943262411347,
      "grad_norm": 0.4273992838829838,
      "learning_rate": 1.7922000765170976e-05,
      "loss": 1.0231,
      "step": 245
    },
    {
      "epoch": 0.8723404255319149,
      "grad_norm": 0.408707494956399,
      "learning_rate": 1.789674025844378e-05,
      "loss": 1.021,
      "step": 246
    },
    {
      "epoch": 0.875886524822695,
      "grad_norm": 0.39186386718572935,
      "learning_rate": 1.7871345172601475e-05,
      "loss": 1.0313,
      "step": 247
    },
    {
      "epoch": 0.8794326241134752,
      "grad_norm": 0.4041443699597106,
      "learning_rate": 1.7845815940436336e-05,
      "loss": 1.0161,
      "step": 248
    },
    {
      "epoch": 0.8829787234042553,
      "grad_norm": 0.3791911252140322,
      "learning_rate": 1.7820152997026807e-05,
      "loss": 1.0213,
      "step": 249
    },
    {
      "epoch": 0.8865248226950354,
      "grad_norm": 0.3812683533757764,
      "learning_rate": 1.7794356779730084e-05,
      "loss": 1.0071,
      "step": 250
    },
    {
      "epoch": 0.8900709219858156,
      "grad_norm": 0.391304314088133,
      "learning_rate": 1.7768427728174677e-05,
      "loss": 1.0088,
      "step": 251
    },
    {
      "epoch": 0.8936170212765957,
      "grad_norm": 0.34306587428261137,
      "learning_rate": 1.7742366284252895e-05,
      "loss": 0.9865,
      "step": 252
    },
    {
      "epoch": 0.8971631205673759,
      "grad_norm": 0.3991855772440667,
      "learning_rate": 1.7716172892113326e-05,
      "loss": 1.0133,
      "step": 253
    },
    {
      "epoch": 0.900709219858156,
      "grad_norm": 0.3837234039192193,
      "learning_rate": 1.768984799815328e-05,
      "loss": 0.993,
      "step": 254
    },
    {
      "epoch": 0.9042553191489362,
      "grad_norm": 0.42478259818829744,
      "learning_rate": 1.7663392051011156e-05,
      "loss": 1.0383,
      "step": 255
    },
    {
      "epoch": 0.9078014184397163,
      "grad_norm": 0.37024984262507593,
      "learning_rate": 1.7636805501558804e-05,
      "loss": 1.0335,
      "step": 256
    },
    {
      "epoch": 0.9113475177304965,
      "grad_norm": 0.4073494702670319,
      "learning_rate": 1.7610088802893867e-05,
      "loss": 1.0341,
      "step": 257
    },
    {
      "epoch": 0.9148936170212766,
      "grad_norm": 0.41045424811733167,
      "learning_rate": 1.7583242410332016e-05,
      "loss": 1.0435,
      "step": 258
    },
    {
      "epoch": 0.9184397163120568,
      "grad_norm": 0.35484485597404164,
      "learning_rate": 1.7556266781399227e-05,
      "loss": 1.0778,
      "step": 259
    },
    {
      "epoch": 0.9219858156028369,
      "grad_norm": 0.39414786905368276,
      "learning_rate": 1.752916237582396e-05,
      "loss": 0.9929,
      "step": 260
    },
    {
      "epoch": 0.925531914893617,
      "grad_norm": 0.3779847959746389,
      "learning_rate": 1.7501929655529344e-05,
      "loss": 1.0518,
      "step": 261
    },
    {
      "epoch": 0.9290780141843972,
      "grad_norm": 0.4023732096344426,
      "learning_rate": 1.7474569084625282e-05,
      "loss": 1.024,
      "step": 262
    },
    {
      "epoch": 0.9326241134751773,
      "grad_norm": 0.38745144745402504,
      "learning_rate": 1.7447081129400562e-05,
      "loss": 1.0152,
      "step": 263
    },
    {
      "epoch": 0.9361702127659575,
      "grad_norm": 0.35292649898123035,
      "learning_rate": 1.74194662583149e-05,
      "loss": 1.0142,
      "step": 264
    },
    {
      "epoch": 0.9397163120567376,
      "grad_norm": 0.41603250139588377,
      "learning_rate": 1.7391724941990952e-05,
      "loss": 1.0182,
      "step": 265
    },
    {
      "epoch": 0.9432624113475178,
      "grad_norm": 0.36463091568100964,
      "learning_rate": 1.7363857653206307e-05,
      "loss": 1.0292,
      "step": 266
    },
    {
      "epoch": 0.9468085106382979,
      "grad_norm": 0.40201841807122,
      "learning_rate": 1.7335864866885424e-05,
      "loss": 1.027,
      "step": 267
    },
    {
      "epoch": 0.950354609929078,
      "grad_norm": 0.4073367489856179,
      "learning_rate": 1.7307747060091525e-05,
      "loss": 0.989,
      "step": 268
    },
    {
      "epoch": 0.9539007092198581,
      "grad_norm": 0.4328837367699121,
      "learning_rate": 1.7279504712018495e-05,
      "loss": 1.0748,
      "step": 269
    },
    {
      "epoch": 0.9574468085106383,
      "grad_norm": 0.3992266115353178,
      "learning_rate": 1.7251138303982675e-05,
      "loss": 1.0325,
      "step": 270
    },
    {
      "epoch": 0.9609929078014184,
      "grad_norm": 0.378699543152666,
      "learning_rate": 1.72226483194147e-05,
      "loss": 1.0129,
      "step": 271
    },
    {
      "epoch": 0.9645390070921985,
      "grad_norm": 0.36186590779535055,
      "learning_rate": 1.7194035243851225e-05,
      "loss": 0.9969,
      "step": 272
    },
    {
      "epoch": 0.9680851063829787,
      "grad_norm": 0.3752934769207618,
      "learning_rate": 1.7165299564926683e-05,
      "loss": 0.9828,
      "step": 273
    },
    {
      "epoch": 0.9716312056737588,
      "grad_norm": 0.40860538489185705,
      "learning_rate": 1.7136441772364943e-05,
      "loss": 1.0382,
      "step": 274
    },
    {
      "epoch": 0.975177304964539,
      "grad_norm": 0.3568597745180973,
      "learning_rate": 1.710746235797099e-05,
      "loss": 1.0251,
      "step": 275
    },
    {
      "epoch": 0.9787234042553191,
      "grad_norm": 0.4276859934230444,
      "learning_rate": 1.7078361815622518e-05,
      "loss": 1.0205,
      "step": 276
    },
    {
      "epoch": 0.9822695035460993,
      "grad_norm": 0.37449732400229285,
      "learning_rate": 1.704914064126155e-05,
      "loss": 1.0266,
      "step": 277
    },
    {
      "epoch": 0.9858156028368794,
      "grad_norm": 0.389616566339835,
      "learning_rate": 1.7019799332885944e-05,
      "loss": 1.0229,
      "step": 278
    },
    {
      "epoch": 0.9893617021276596,
      "grad_norm": 0.3879192737138395,
      "learning_rate": 1.6990338390540935e-05,
      "loss": 1.0243,
      "step": 279
    },
    {
      "epoch": 0.9929078014184397,
      "grad_norm": 0.41573175808695956,
      "learning_rate": 1.6960758316310597e-05,
      "loss": 1.0229,
      "step": 280
    },
    {
      "epoch": 0.9964539007092199,
      "grad_norm": 0.38073253927449124,
      "learning_rate": 1.69310596143093e-05,
      "loss": 0.9925,
      "step": 281
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4255526467602786,
      "learning_rate": 1.6901242790673108e-05,
      "loss": 1.026,
      "step": 282
    },
    {
      "epoch": 1.00354609929078,
      "grad_norm": 0.42162634204753163,
      "learning_rate": 1.6871308353551153e-05,
      "loss": 1.0029,
      "step": 283
    },
    {
      "epoch": 1.0070921985815602,
      "grad_norm": 0.4337284231713999,
      "learning_rate": 1.684125681309699e-05,
      "loss": 0.9403,
      "step": 284
    },
    {
      "epoch": 1.0106382978723405,
      "grad_norm": 0.3708412522789234,
      "learning_rate": 1.6811088681459876e-05,
      "loss": 0.9679,
      "step": 285
    },
    {
      "epoch": 1.0141843971631206,
      "grad_norm": 0.4547631877983983,
      "learning_rate": 1.678080447277608e-05,
      "loss": 0.998,
      "step": 286
    },
    {
      "epoch": 1.0177304964539007,
      "grad_norm": 0.376112068736744,
      "learning_rate": 1.6750404703160063e-05,
      "loss": 0.993,
      "step": 287
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 0.4164541233002394,
      "learning_rate": 1.6719889890695756e-05,
      "loss": 0.9793,
      "step": 288
    },
    {
      "epoch": 1.024822695035461,
      "grad_norm": 0.4464551981366061,
      "learning_rate": 1.6689260555427666e-05,
      "loss": 0.9713,
      "step": 289
    },
    {
      "epoch": 1.0283687943262412,
      "grad_norm": 0.41796256047468283,
      "learning_rate": 1.665851721935205e-05,
      "loss": 0.9949,
      "step": 290
    },
    {
      "epoch": 1.0319148936170213,
      "grad_norm": 0.3889331388007683,
      "learning_rate": 1.6627660406408e-05,
      "loss": 1.0553,
      "step": 291
    },
    {
      "epoch": 1.0354609929078014,
      "grad_norm": 0.42334438714966177,
      "learning_rate": 1.6596690642468537e-05,
      "loss": 0.9754,
      "step": 292
    },
    {
      "epoch": 1.0390070921985815,
      "grad_norm": 0.42310995735693097,
      "learning_rate": 1.6565608455331622e-05,
      "loss": 0.9374,
      "step": 293
    },
    {
      "epoch": 1.0425531914893618,
      "grad_norm": 0.3692897902651878,
      "learning_rate": 1.6534414374711167e-05,
      "loss": 0.9771,
      "step": 294
    },
    {
      "epoch": 1.0460992907801419,
      "grad_norm": 0.3925797062980314,
      "learning_rate": 1.650310893222803e-05,
      "loss": 0.9761,
      "step": 295
    },
    {
      "epoch": 1.049645390070922,
      "grad_norm": 0.39833455353073705,
      "learning_rate": 1.6471692661400925e-05,
      "loss": 1.0217,
      "step": 296
    },
    {
      "epoch": 1.053191489361702,
      "grad_norm": 0.3819281134424317,
      "learning_rate": 1.6440166097637355e-05,
      "loss": 0.9801,
      "step": 297
    },
    {
      "epoch": 1.0567375886524824,
      "grad_norm": 0.3740823723188146,
      "learning_rate": 1.640852977822446e-05,
      "loss": 0.9807,
      "step": 298
    },
    {
      "epoch": 1.0602836879432624,
      "grad_norm": 0.3935831640578161,
      "learning_rate": 1.637678424231989e-05,
      "loss": 0.9534,
      "step": 299
    },
    {
      "epoch": 1.0638297872340425,
      "grad_norm": 0.4130459791663398,
      "learning_rate": 1.6344930030942593e-05,
      "loss": 0.9945,
      "step": 300
    },
    {
      "epoch": 1.0673758865248226,
      "grad_norm": 0.4172895714087477,
      "learning_rate": 1.6312967686963606e-05,
      "loss": 0.968,
      "step": 301
    },
    {
      "epoch": 1.070921985815603,
      "grad_norm": 0.3854672364656851,
      "learning_rate": 1.6280897755096805e-05,
      "loss": 0.961,
      "step": 302
    },
    {
      "epoch": 1.074468085106383,
      "grad_norm": 0.4336599329155487,
      "learning_rate": 1.6248720781889612e-05,
      "loss": 0.9568,
      "step": 303
    },
    {
      "epoch": 1.0780141843971631,
      "grad_norm": 0.42322982607889414,
      "learning_rate": 1.6216437315713693e-05,
      "loss": 0.9821,
      "step": 304
    },
    {
      "epoch": 1.0815602836879432,
      "grad_norm": 0.3974435049130651,
      "learning_rate": 1.61840479067556e-05,
      "loss": 0.9453,
      "step": 305
    },
    {
      "epoch": 1.0851063829787233,
      "grad_norm": 0.4019963999487,
      "learning_rate": 1.6151553107007395e-05,
      "loss": 0.9374,
      "step": 306
    },
    {
      "epoch": 1.0886524822695036,
      "grad_norm": 0.3932799513813298,
      "learning_rate": 1.6118953470257267e-05,
      "loss": 0.9708,
      "step": 307
    },
    {
      "epoch": 1.0921985815602837,
      "grad_norm": 0.3986498595695824,
      "learning_rate": 1.608624955208006e-05,
      "loss": 0.9646,
      "step": 308
    },
    {
      "epoch": 1.0957446808510638,
      "grad_norm": 0.368172249858901,
      "learning_rate": 1.6053441909827823e-05,
      "loss": 0.995,
      "step": 309
    },
    {
      "epoch": 1.099290780141844,
      "grad_norm": 0.4263975876112546,
      "learning_rate": 1.6020531102620303e-05,
      "loss": 1.0011,
      "step": 310
    },
    {
      "epoch": 1.1028368794326242,
      "grad_norm": 0.3482330087299883,
      "learning_rate": 1.5987517691335443e-05,
      "loss": 0.9654,
      "step": 311
    },
    {
      "epoch": 1.1063829787234043,
      "grad_norm": 0.4368586512892545,
      "learning_rate": 1.595440223859977e-05,
      "loss": 0.9683,
      "step": 312
    },
    {
      "epoch": 1.1099290780141844,
      "grad_norm": 0.35850477996330843,
      "learning_rate": 1.5921185308778863e-05,
      "loss": 0.97,
      "step": 313
    },
    {
      "epoch": 1.1134751773049645,
      "grad_norm": 0.4255767896591614,
      "learning_rate": 1.5887867467967703e-05,
      "loss": 0.9879,
      "step": 314
    },
    {
      "epoch": 1.1170212765957448,
      "grad_norm": 0.37321781456573017,
      "learning_rate": 1.585444928398103e-05,
      "loss": 0.9675,
      "step": 315
    },
    {
      "epoch": 1.1205673758865249,
      "grad_norm": 0.37089000074077766,
      "learning_rate": 1.5820931326343673e-05,
      "loss": 0.9362,
      "step": 316
    },
    {
      "epoch": 1.124113475177305,
      "grad_norm": 0.5469094383974885,
      "learning_rate": 1.5787314166280833e-05,
      "loss": 0.9871,
      "step": 317
    },
    {
      "epoch": 1.127659574468085,
      "grad_norm": 0.3929479871521509,
      "learning_rate": 1.5753598376708365e-05,
      "loss": 0.952,
      "step": 318
    },
    {
      "epoch": 1.1312056737588652,
      "grad_norm": 0.3861132355230585,
      "learning_rate": 1.5719784532223e-05,
      "loss": 0.9804,
      "step": 319
    },
    {
      "epoch": 1.1347517730496455,
      "grad_norm": 0.3699956189442316,
      "learning_rate": 1.5685873209092547e-05,
      "loss": 0.9658,
      "step": 320
    },
    {
      "epoch": 1.1382978723404256,
      "grad_norm": 0.4266810239187451,
      "learning_rate": 1.5651864985246095e-05,
      "loss": 0.9693,
      "step": 321
    },
    {
      "epoch": 1.1418439716312057,
      "grad_norm": 0.38488281487698434,
      "learning_rate": 1.5617760440264143e-05,
      "loss": 0.9705,
      "step": 322
    },
    {
      "epoch": 1.1453900709219857,
      "grad_norm": 0.43051269351008986,
      "learning_rate": 1.558356015536873e-05,
      "loss": 0.9813,
      "step": 323
    },
    {
      "epoch": 1.148936170212766,
      "grad_norm": 0.3894392288886495,
      "learning_rate": 1.5549264713413536e-05,
      "loss": 0.9521,
      "step": 324
    },
    {
      "epoch": 1.1524822695035462,
      "grad_norm": 0.41603671070271653,
      "learning_rate": 1.551487469887393e-05,
      "loss": 0.966,
      "step": 325
    },
    {
      "epoch": 1.1560283687943262,
      "grad_norm": 0.4076462857668706,
      "learning_rate": 1.5480390697837033e-05,
      "loss": 1.0079,
      "step": 326
    },
    {
      "epoch": 1.1595744680851063,
      "grad_norm": 0.38313030955774846,
      "learning_rate": 1.5445813297991713e-05,
      "loss": 0.9585,
      "step": 327
    },
    {
      "epoch": 1.1631205673758864,
      "grad_norm": 0.4134651737788734,
      "learning_rate": 1.541114308861857e-05,
      "loss": 0.9736,
      "step": 328
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 0.4212975648234023,
      "learning_rate": 1.5376380660579907e-05,
      "loss": 1.0212,
      "step": 329
    },
    {
      "epoch": 1.1702127659574468,
      "grad_norm": 0.39702588616324036,
      "learning_rate": 1.5341526606309646e-05,
      "loss": 0.9744,
      "step": 330
    },
    {
      "epoch": 1.173758865248227,
      "grad_norm": 0.3720449347403283,
      "learning_rate": 1.5306581519803233e-05,
      "loss": 0.9566,
      "step": 331
    },
    {
      "epoch": 1.177304964539007,
      "grad_norm": 0.40095384584626675,
      "learning_rate": 1.5271545996607525e-05,
      "loss": 1.0078,
      "step": 332
    },
    {
      "epoch": 1.1808510638297873,
      "grad_norm": 0.4089399913580165,
      "learning_rate": 1.5236420633810633e-05,
      "loss": 0.9953,
      "step": 333
    },
    {
      "epoch": 1.1843971631205674,
      "grad_norm": 0.4073214342319023,
      "learning_rate": 1.520120603003174e-05,
      "loss": 1.0064,
      "step": 334
    },
    {
      "epoch": 1.1879432624113475,
      "grad_norm": 0.39061623369847304,
      "learning_rate": 1.516590278541092e-05,
      "loss": 0.9624,
      "step": 335
    },
    {
      "epoch": 1.1914893617021276,
      "grad_norm": 0.43052959263616075,
      "learning_rate": 1.513051150159888e-05,
      "loss": 0.9548,
      "step": 336
    },
    {
      "epoch": 1.1950354609929077,
      "grad_norm": 0.4235116096113713,
      "learning_rate": 1.5095032781746732e-05,
      "loss": 0.9411,
      "step": 337
    },
    {
      "epoch": 1.198581560283688,
      "grad_norm": 0.38060647472893777,
      "learning_rate": 1.5059467230495703e-05,
      "loss": 0.9588,
      "step": 338
    },
    {
      "epoch": 1.202127659574468,
      "grad_norm": 0.48596035224216816,
      "learning_rate": 1.5023815453966837e-05,
      "loss": 0.9988,
      "step": 339
    },
    {
      "epoch": 1.2056737588652482,
| "grad_norm": 0.4167021821865609, | |
| "learning_rate": 1.4988078059750652e-05, | |
| "loss": 1.0026, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.2092198581560283, | |
| "grad_norm": 0.44545889732960464, | |
| "learning_rate": 1.4952255656896801e-05, | |
| "loss": 0.9888, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.2127659574468086, | |
| "grad_norm": 0.3918874394085114, | |
| "learning_rate": 1.4916348855903679e-05, | |
| "loss": 0.9889, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.2163120567375887, | |
| "grad_norm": 0.36397855903707554, | |
| "learning_rate": 1.488035826870803e-05, | |
| "loss": 0.9798, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.2198581560283688, | |
| "grad_norm": 0.4098853286858338, | |
| "learning_rate": 1.484428450867451e-05, | |
| "loss": 0.9582, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.2234042553191489, | |
| "grad_norm": 0.38453575428102105, | |
| "learning_rate": 1.480812819058524e-05, | |
| "loss": 1.0062, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.226950354609929, | |
| "grad_norm": 0.433496258525714, | |
| "learning_rate": 1.4771889930629317e-05, | |
| "loss": 0.9442, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.2304964539007093, | |
| "grad_norm": 0.38804486850428566, | |
| "learning_rate": 1.473557034639233e-05, | |
| "loss": 0.9674, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.2340425531914894, | |
| "grad_norm": 0.35527739750700876, | |
| "learning_rate": 1.4699170056845822e-05, | |
| "loss": 0.9545, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.2375886524822695, | |
| "grad_norm": 0.38950854347971275, | |
| "learning_rate": 1.4662689682336746e-05, | |
| "loss": 0.9605, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.2411347517730495, | |
| "grad_norm": 0.3832098944579604, | |
| "learning_rate": 1.4626129844576894e-05, | |
| "loss": 0.9749, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.2446808510638299, | |
| "grad_norm": 0.4336296794079613, | |
| "learning_rate": 1.4589491166632292e-05, | |
| "loss": 0.9415, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.24822695035461, | |
| "grad_norm": 0.40843255555461383, | |
| "learning_rate": 1.4552774272912602e-05, | |
| "loss": 0.9766, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.25177304964539, | |
| "grad_norm": 0.44986141537811064, | |
| "learning_rate": 1.4515979789160455e-05, | |
| "loss": 0.9856, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.2553191489361701, | |
| "grad_norm": 0.458707236262281, | |
| "learning_rate": 1.4479108342440804e-05, | |
| "loss": 0.9937, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.2588652482269502, | |
| "grad_norm": 0.40472496228744964, | |
| "learning_rate": 1.4442160561130238e-05, | |
| "loss": 0.9655, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.2624113475177305, | |
| "grad_norm": 0.39162624954597314, | |
| "learning_rate": 1.4405137074906259e-05, | |
| "loss": 0.9413, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.2659574468085106, | |
| "grad_norm": 0.4534609974783642, | |
| "learning_rate": 1.4368038514736566e-05, | |
| "loss": 0.9883, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.2695035460992907, | |
| "grad_norm": 0.3718189815457106, | |
| "learning_rate": 1.4330865512868293e-05, | |
| "loss": 0.9619, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.273049645390071, | |
| "grad_norm": 0.4649383944211333, | |
| "learning_rate": 1.429361870281724e-05, | |
| "loss": 1.0076, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.2765957446808511, | |
| "grad_norm": 0.3949660291531871, | |
| "learning_rate": 1.4256298719357063e-05, | |
| "loss": 0.9752, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.2801418439716312, | |
| "grad_norm": 0.4077416176647928, | |
| "learning_rate": 1.421890619850848e-05, | |
| "loss": 0.9496, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.2836879432624113, | |
| "grad_norm": 0.3895313217970244, | |
| "learning_rate": 1.4181441777528402e-05, | |
| "loss": 0.9754, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.2872340425531914, | |
| "grad_norm": 0.4495084176311799, | |
| "learning_rate": 1.4143906094899103e-05, | |
| "loss": 0.9408, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.2907801418439715, | |
| "grad_norm": 0.40503459294390115, | |
| "learning_rate": 1.410629979031731e-05, | |
| "loss": 0.9305, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.2943262411347518, | |
| "grad_norm": 0.4102179939736063, | |
| "learning_rate": 1.4068623504683334e-05, | |
| "loss": 0.9684, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.297872340425532, | |
| "grad_norm": 0.3987705956597129, | |
| "learning_rate": 1.4030877880090104e-05, | |
| "loss": 1.0156, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.301418439716312, | |
| "grad_norm": 0.42916989583146437, | |
| "learning_rate": 1.3993063559812268e-05, | |
| "loss": 0.9572, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.3049645390070923, | |
| "grad_norm": 0.3909945273536003, | |
| "learning_rate": 1.3955181188295203e-05, | |
| "loss": 0.9646, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.3085106382978724, | |
| "grad_norm": 0.44340897137841345, | |
| "learning_rate": 1.391723141114404e-05, | |
| "loss": 1.0004, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.3120567375886525, | |
| "grad_norm": 0.38336773400029367, | |
| "learning_rate": 1.3879214875112665e-05, | |
| "loss": 0.9815, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.3156028368794326, | |
| "grad_norm": 0.4587120808854636, | |
| "learning_rate": 1.3841132228092684e-05, | |
| "loss": 0.9451, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.3191489361702127, | |
| "grad_norm": 0.42258328196940764, | |
| "learning_rate": 1.3802984119102403e-05, | |
| "loss": 0.9619, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.322695035460993, | |
| "grad_norm": 0.397151037289473, | |
| "learning_rate": 1.3764771198275745e-05, | |
| "loss": 0.9866, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.326241134751773, | |
| "grad_norm": 0.37879764171065744, | |
| "learning_rate": 1.3726494116851189e-05, | |
| "loss": 0.9678, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.3297872340425532, | |
| "grad_norm": 0.4055657975764546, | |
| "learning_rate": 1.368815352716065e-05, | |
| "loss": 0.9504, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.37133045086049504, | |
| "learning_rate": 1.3649750082618387e-05, | |
| "loss": 0.9689, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.3368794326241136, | |
| "grad_norm": 0.41029972517648766, | |
| "learning_rate": 1.361128443770984e-05, | |
| "loss": 0.9708, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.3404255319148937, | |
| "grad_norm": 0.39076722012763027, | |
| "learning_rate": 1.3572757247980502e-05, | |
| "loss": 0.9743, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.3439716312056738, | |
| "grad_norm": 0.3448538829992877, | |
| "learning_rate": 1.353416917002473e-05, | |
| "loss": 0.9395, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.3475177304964538, | |
| "grad_norm": 0.37047746355086686, | |
| "learning_rate": 1.3495520861474565e-05, | |
| "loss": 0.9485, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.351063829787234, | |
| "grad_norm": 0.42680045310215664, | |
| "learning_rate": 1.3456812980988513e-05, | |
| "loss": 0.9995, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.3546099290780143, | |
| "grad_norm": 0.3414703460078149, | |
| "learning_rate": 1.3418046188240328e-05, | |
| "loss": 0.9613, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.3581560283687943, | |
| "grad_norm": 0.3829959439836121, | |
| "learning_rate": 1.3379221143907775e-05, | |
| "loss": 0.9859, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.3617021276595744, | |
| "grad_norm": 0.3701947232252572, | |
| "learning_rate": 1.3340338509661355e-05, | |
| "loss": 0.9833, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.3652482269503547, | |
| "grad_norm": 0.36949452405651734, | |
| "learning_rate": 1.3301398948153042e-05, | |
| "loss": 0.95, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.3687943262411348, | |
| "grad_norm": 0.3611222088473356, | |
| "learning_rate": 1.326240312300498e-05, | |
| "loss": 0.9872, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.372340425531915, | |
| "grad_norm": 0.3859473860907529, | |
| "learning_rate": 1.3223351698798186e-05, | |
| "loss": 0.9391, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.375886524822695, | |
| "grad_norm": 0.3582103645991868, | |
| "learning_rate": 1.318424534106121e-05, | |
| "loss": 0.9636, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.3794326241134751, | |
| "grad_norm": 0.3366736526064197, | |
| "learning_rate": 1.3145084716258804e-05, | |
| "loss": 0.9617, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.3829787234042552, | |
| "grad_norm": 0.36060765339239736, | |
| "learning_rate": 1.3105870491780559e-05, | |
| "loss": 0.9599, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.3865248226950355, | |
| "grad_norm": 0.398088313286727, | |
| "learning_rate": 1.3066603335929522e-05, | |
| "loss": 0.981, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.3900709219858156, | |
| "grad_norm": 0.36529659957165594, | |
| "learning_rate": 1.302728391791083e-05, | |
| "loss": 0.964, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.3936170212765957, | |
| "grad_norm": 0.4007319301068806, | |
| "learning_rate": 1.2987912907820274e-05, | |
| "loss": 0.9867, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.397163120567376, | |
| "grad_norm": 0.4247614902726926, | |
| "learning_rate": 1.2948490976632914e-05, | |
| "loss": 0.9446, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.400709219858156, | |
| "grad_norm": 0.3547395206863501, | |
| "learning_rate": 1.2909018796191616e-05, | |
| "loss": 0.9643, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.4042553191489362, | |
| "grad_norm": 0.6673933827613349, | |
| "learning_rate": 1.2869497039195609e-05, | |
| "loss": 0.9537, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.4078014184397163, | |
| "grad_norm": 0.35359698348808816, | |
| "learning_rate": 1.2829926379189025e-05, | |
| "loss": 0.9844, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.4113475177304964, | |
| "grad_norm": 0.36149036424597636, | |
| "learning_rate": 1.2790307490549432e-05, | |
| "loss": 0.9827, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.4148936170212765, | |
| "grad_norm": 0.3831897208194427, | |
| "learning_rate": 1.2750641048476309e-05, | |
| "loss": 0.9742, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.4184397163120568, | |
| "grad_norm": 0.3871487792244679, | |
| "learning_rate": 1.2710927728979568e-05, | |
| "loss": 0.953, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.4219858156028369, | |
| "grad_norm": 0.37024401544958657, | |
| "learning_rate": 1.2671168208868024e-05, | |
| "loss": 0.9524, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.425531914893617, | |
| "grad_norm": 0.4267642235307254, | |
| "learning_rate": 1.2631363165737854e-05, | |
| "loss": 0.9504, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.4290780141843973, | |
| "grad_norm": 0.3381259448637118, | |
| "learning_rate": 1.2591513277961058e-05, | |
| "loss": 0.9685, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.4326241134751774, | |
| "grad_norm": 0.3623948447486684, | |
| "learning_rate": 1.2551619224673894e-05, | |
| "loss": 0.9721, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.4361702127659575, | |
| "grad_norm": 0.3376920646108776, | |
| "learning_rate": 1.2511681685765308e-05, | |
| "loss": 0.9697, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.4397163120567376, | |
| "grad_norm": 0.3711071679343133, | |
| "learning_rate": 1.2471701341865342e-05, | |
| "loss": 0.9814, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.4432624113475176, | |
| "grad_norm": 0.3650675243183325, | |
| "learning_rate": 1.2431678874333532e-05, | |
| "loss": 0.9713, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.4468085106382977, | |
| "grad_norm": 0.3862656526697159, | |
| "learning_rate": 1.2391614965247307e-05, | |
| "loss": 0.9974, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.450354609929078, | |
| "grad_norm": 0.3532038311894071, | |
| "learning_rate": 1.2351510297390348e-05, | |
| "loss": 0.9807, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.4539007092198581, | |
| "grad_norm": 0.3467889591941661, | |
| "learning_rate": 1.2311365554240972e-05, | |
| "loss": 0.9281, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.4574468085106382, | |
| "grad_norm": 0.36275768850044293, | |
| "learning_rate": 1.2271181419960473e-05, | |
| "loss": 0.9743, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.4609929078014185, | |
| "grad_norm": 0.35517643206705524, | |
| "learning_rate": 1.2230958579381458e-05, | |
| "loss": 0.9801, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.4645390070921986, | |
| "grad_norm": 0.3877376505719052, | |
| "learning_rate": 1.2190697717996179e-05, | |
| "loss": 0.992, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.4680851063829787, | |
| "grad_norm": 0.3562682093151937, | |
| "learning_rate": 1.2150399521944858e-05, | |
| "loss": 0.9653, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.4716312056737588, | |
| "grad_norm": 0.3604528130360619, | |
| "learning_rate": 1.211006467800399e-05, | |
| "loss": 0.9431, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.475177304964539, | |
| "grad_norm": 0.3612796477592489, | |
| "learning_rate": 1.2069693873574628e-05, | |
| "loss": 0.9477, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.4787234042553192, | |
| "grad_norm": 0.34195694361156626, | |
| "learning_rate": 1.2029287796670684e-05, | |
| "loss": 0.9421, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.4822695035460993, | |
| "grad_norm": 0.38109197935236333, | |
| "learning_rate": 1.1988847135907188e-05, | |
| "loss": 0.9541, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.4858156028368794, | |
| "grad_norm": 0.3498732999579304, | |
| "learning_rate": 1.1948372580488574e-05, | |
| "loss": 0.9705, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.4893617021276595, | |
| "grad_norm": 0.39752070002562495, | |
| "learning_rate": 1.190786482019691e-05, | |
| "loss": 0.935, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.4929078014184398, | |
| "grad_norm": 0.3524932281108635, | |
| "learning_rate": 1.1867324545380159e-05, | |
| "loss": 0.9549, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.49645390070922, | |
| "grad_norm": 0.406904792088018, | |
| "learning_rate": 1.1826752446940401e-05, | |
| "loss": 0.9501, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.3836320671730948, | |
| "learning_rate": 1.1786149216322079e-05, | |
| "loss": 0.9843, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.50354609929078, | |
| "grad_norm": 0.3453064612911423, | |
| "learning_rate": 1.1745515545500186e-05, | |
| "loss": 0.988, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.5070921985815602, | |
| "grad_norm": 0.4169790496181414, | |
| "learning_rate": 1.17048521269685e-05, | |
| "loss": 0.9426, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.5106382978723403, | |
| "grad_norm": 0.35431862853342666, | |
| "learning_rate": 1.1664159653727767e-05, | |
| "loss": 1.0007, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.5141843971631206, | |
| "grad_norm": 0.4403221223658087, | |
| "learning_rate": 1.1623438819273887e-05, | |
| "loss": 0.9394, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.5177304964539007, | |
| "grad_norm": 0.35909026402669597, | |
| "learning_rate": 1.1582690317586116e-05, | |
| "loss": 0.9688, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.521276595744681, | |
| "grad_norm": 0.3616088953094568, | |
| "learning_rate": 1.1541914843115212e-05, | |
| "loss": 0.9548, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.524822695035461, | |
| "grad_norm": 0.41651072781255166, | |
| "learning_rate": 1.1501113090771618e-05, | |
| "loss": 0.9434, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.5283687943262412, | |
| "grad_norm": 0.3533959037818844, | |
| "learning_rate": 1.146028575591362e-05, | |
| "loss": 0.9627, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.5319148936170213, | |
| "grad_norm": 0.3987483236015162, | |
| "learning_rate": 1.1419433534335483e-05, | |
| "loss": 0.9786, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.5354609929078014, | |
| "grad_norm": 0.39355195566700935, | |
| "learning_rate": 1.1378557122255596e-05, | |
| "loss": 0.9607, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.5390070921985815, | |
| "grad_norm": 0.3719202018138273, | |
| "learning_rate": 1.133765721630462e-05, | |
| "loss": 0.9938, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.5425531914893615, | |
| "grad_norm": 0.3553320294386332, | |
| "learning_rate": 1.129673451351361e-05, | |
| "loss": 0.9611, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.5460992907801419, | |
| "grad_norm": 0.4083603203915794, | |
| "learning_rate": 1.1255789711302123e-05, | |
| "loss": 0.9735, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.549645390070922, | |
| "grad_norm": 0.3653527658924869, | |
| "learning_rate": 1.1214823507466345e-05, | |
| "loss": 0.9837, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.5531914893617023, | |
| "grad_norm": 0.3737537963108637, | |
| "learning_rate": 1.1173836600167202e-05, | |
| "loss": 0.9586, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.5567375886524824, | |
| "grad_norm": 0.393363237627448, | |
| "learning_rate": 1.1132829687918456e-05, | |
| "loss": 0.9328, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.5602836879432624, | |
| "grad_norm": 0.3816275461313281, | |
| "learning_rate": 1.1091803469574789e-05, | |
| "loss": 0.9539, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.5638297872340425, | |
| "grad_norm": 0.37772612682001405, | |
| "learning_rate": 1.1050758644319917e-05, | |
| "loss": 0.9284, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.5673758865248226, | |
| "grad_norm": 0.37139778430983705, | |
| "learning_rate": 1.1009695911654651e-05, | |
| "loss": 0.9592, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.5709219858156027, | |
| "grad_norm": 0.3789450188184618, | |
| "learning_rate": 1.0968615971385e-05, | |
| "loss": 0.9496, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.574468085106383, | |
| "grad_norm": 0.3490092134445974, | |
| "learning_rate": 1.092751952361022e-05, | |
| "loss": 0.9441, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.5780141843971631, | |
| "grad_norm": 0.37172081990209344, | |
| "learning_rate": 1.08864072687109e-05, | |
| "loss": 0.9737, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.5815602836879432, | |
| "grad_norm": 0.3689748363727634, | |
| "learning_rate": 1.0845279907337016e-05, | |
| "loss": 0.9625, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.5851063829787235, | |
| "grad_norm": 0.36769682217189764, | |
| "learning_rate": 1.0804138140395995e-05, | |
| "loss": 0.9448, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.5886524822695036, | |
| "grad_norm": 0.36948009642812196, | |
| "learning_rate": 1.0762982669040772e-05, | |
| "loss": 0.9422, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.5921985815602837, | |
| "grad_norm": 0.3874510000854089, | |
| "learning_rate": 1.0721814194657832e-05, | |
| "loss": 0.9559, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.5957446808510638, | |
| "grad_norm": 0.35123043713553576, | |
| "learning_rate": 1.0680633418855266e-05, | |
| "loss": 0.9909, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.599290780141844, | |
| "grad_norm": 0.3424584678806653, | |
| "learning_rate": 1.0639441043450809e-05, | |
| "loss": 0.987, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.602836879432624, | |
| "grad_norm": 0.3540879822796454, | |
| "learning_rate": 1.0598237770459887e-05, | |
| "loss": 0.9642, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.6063829787234043, | |
| "grad_norm": 0.341535695117283, | |
| "learning_rate": 1.0557024302083635e-05, | |
| "loss": 0.9458, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.6099290780141844, | |
| "grad_norm": 0.3401330561400603, | |
| "learning_rate": 1.0515801340696946e-05, | |
| "loss": 0.9537, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.6134751773049647, | |
| "grad_norm": 0.3793952706621045, | |
| "learning_rate": 1.0474569588836503e-05, | |
| "loss": 0.9788, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.6170212765957448, | |
| "grad_norm": 0.3601958693890712, | |
| "learning_rate": 1.043332974918879e-05, | |
| "loss": 0.9419, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.6205673758865249, | |
| "grad_norm": 0.36751037561946204, | |
| "learning_rate": 1.0392082524578135e-05, | |
| "loss": 0.954, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.624113475177305, | |
| "grad_norm": 0.3996259740255699, | |
| "learning_rate": 1.0350828617954712e-05, | |
| "loss": 0.97, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.627659574468085, | |
| "grad_norm": 0.3742204360804281, | |
| "learning_rate": 1.0309568732382582e-05, | |
| "loss": 0.9609, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.6312056737588652, | |
| "grad_norm": 0.3363933659013848, | |
| "learning_rate": 1.0268303571027696e-05, | |
| "loss": 0.9292, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.6347517730496453, | |
| "grad_norm": 0.3852661006055739, | |
| "learning_rate": 1.0227033837145924e-05, | |
| "loss": 0.9579, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.6382978723404256, | |
| "grad_norm": 0.3594510036459689, | |
| "learning_rate": 1.0185760234071054e-05, | |
| "loss": 0.9486, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.6418439716312057, | |
| "grad_norm": 0.38093406145891096, | |
| "learning_rate": 1.0144483465202818e-05, | |
| "loss": 0.9515, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.645390070921986, | |
| "grad_norm": 0.37391762202041084, | |
| "learning_rate": 1.01032042339949e-05, | |
| "loss": 0.951, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.648936170212766, | |
| "grad_norm": 0.3539528727618403, | |
| "learning_rate": 1.0061923243942947e-05, | |
| "loss": 0.9668, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.6524822695035462, | |
| "grad_norm": 0.3630678066448606, | |
| "learning_rate": 1.0020641198572582e-05, | |
| "loss": 0.9669, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.6560283687943262, | |
| "grad_norm": 0.3963323235424878, | |
| "learning_rate": 9.97935880142742e-06, | |
| "loss": 0.9728, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.6595744680851063, | |
| "grad_norm": 0.3708572491271238, | |
| "learning_rate": 9.938076756057055e-06, | |
| "loss": 0.9762, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.6631205673758864, | |
| "grad_norm": 0.3930914103727205, | |
| "learning_rate": 9.896795766005104e-06, | |
| "loss": 0.958, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 0.3878977246149519, | |
| "learning_rate": 9.855516534797187e-06, | |
| "loss": 0.9612, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.6702127659574468, | |
| "grad_norm": 0.32502813361802174, | |
| "learning_rate": 9.81423976592895e-06, | |
| "loss": 0.954, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.673758865248227, | |
| "grad_norm": 0.3470600424638187, | |
| "learning_rate": 9.77296616285408e-06, | |
| "loss": 0.9468, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.6773049645390072, | |
| "grad_norm": 0.3998668324871742, | |
| "learning_rate": 9.731696428972304e-06, | |
| "loss": 0.983, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.6808510638297873, | |
| "grad_norm": 0.3853130034264598, | |
| "learning_rate": 9.690431267617421e-06, | |
| "loss": 0.9814, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.6843971631205674, | |
| "grad_norm": 0.3393026991437144, | |
| "learning_rate": 9.649171382045293e-06, | |
| "loss": 0.9785, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.6879432624113475, | |
| "grad_norm": 0.40590543355182296, | |
| "learning_rate": 9.607917475421868e-06, | |
| "loss": 0.9443, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.6914893617021276, | |
| "grad_norm": 0.3536310502781339, | |
| "learning_rate": 9.566670250811213e-06, | |
| "loss": 0.9698, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.6950354609929077, | |
| "grad_norm": 0.3784372371857604, | |
| "learning_rate": 9.5254304111635e-06, | |
| "loss": 0.986, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.6985815602836878, | |
| "grad_norm": 0.3688331189520331, | |
| "learning_rate": 9.484198659303057e-06, | |
| "loss": 0.9149, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.702127659574468, | |
| "grad_norm": 0.34633104551777094, | |
| "learning_rate": 9.442975697916372e-06, | |
| "loss": 0.945, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.7056737588652482, | |
| "grad_norm": 0.3689457615265894, | |
| "learning_rate": 9.401762229540116e-06, | |
| "loss": 0.9336, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.7092198581560285, | |
| "grad_norm": 0.37664918461886804, | |
| "learning_rate": 9.360558956549193e-06, | |
| "loss": 0.927, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.7127659574468086, | |
| "grad_norm": 0.34778225857336614, | |
| "learning_rate": 9.319366581144736e-06, | |
| "loss": 0.964, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.7163120567375887, | |
| "grad_norm": 0.3527437985857996, | |
| "learning_rate": 9.278185805342171e-06, | |
| "loss": 0.9792, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.7198581560283688, | |
| "grad_norm": 0.36930633702754206, | |
| "learning_rate": 9.237017330959233e-06, | |
| "loss": 0.9646, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.7234042553191489, | |
| "grad_norm": 0.3321105431295584, | |
| "learning_rate": 9.195861859604008e-06, | |
| "loss": 0.9775, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.726950354609929, | |
| "grad_norm": 0.336145975690601, | |
| "learning_rate": 9.154720092662986e-06, | |
| "loss": 0.9748, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.7304964539007093, | |
| "grad_norm": 0.34486030423878333, | |
| "learning_rate": 9.113592731289102e-06, | |
| "loss": 0.9214, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.7340425531914894, | |
| "grad_norm": 0.3432838041796281, | |
| "learning_rate": 9.072480476389782e-06, | |
| "loss": 0.9536, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.7375886524822695, | |
| "grad_norm": 0.35613857468820703, | |
| "learning_rate": 9.031384028615005e-06, | |
| "loss": 1.0034, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.7411347517730498, | |
| "grad_norm": 0.3361342737820547, | |
| "learning_rate": 8.99030408834535e-06, | |
| "loss": 0.9503, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.7446808510638299, | |
| "grad_norm": 0.363558506677557, | |
| "learning_rate": 8.94924135568009e-06, | |
| "loss": 0.9651, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.74822695035461, | |
| "grad_norm": 0.33621685011472696, | |
| "learning_rate": 8.908196530425213e-06, | |
| "loss": 0.986, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.75177304964539, | |
| "grad_norm": 0.33671054356465846, | |
| "learning_rate": 8.867170312081546e-06, | |
| "loss": 1.0193, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.7553191489361701, | |
| "grad_norm": 0.35209822799689444, | |
| "learning_rate": 8.8261633998328e-06, | |
| "loss": 0.9437, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.7588652482269502, | |
| "grad_norm": 0.3553864898061415, | |
| "learning_rate": 8.785176492533656e-06, | |
| "loss": 0.9782, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.7624113475177305, | |
| "grad_norm": 0.33174231265756776, | |
| "learning_rate": 8.744210288697882e-06, | |
| "loss": 0.9628, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.7659574468085106, | |
| "grad_norm": 0.3409297743356347, | |
| "learning_rate": 8.703265486486391e-06, | |
| "loss": 0.9449, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.7695035460992907, | |
| "grad_norm": 0.3445185706849665, | |
| "learning_rate": 8.662342783695381e-06, | |
| "loss": 0.9841, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.773049645390071, | |
| "grad_norm": 0.3441733375131873, | |
| "learning_rate": 8.621442877744409e-06, | |
| "loss": 0.9841, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.7765957446808511, | |
| "grad_norm": 0.35173263772752744, | |
| "learning_rate": 8.58056646566452e-06, | |
| "loss": 0.9635, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.7801418439716312, | |
| "grad_norm": 0.3823010790918624, | |
| "learning_rate": 8.539714244086381e-06, | |
| "loss": 0.9493, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.7836879432624113, | |
| "grad_norm": 0.3312383340054398, | |
| "learning_rate": 8.49888690922838e-06, | |
| "loss": 0.9869, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.7872340425531914, | |
| "grad_norm": 0.3560759208074366, | |
| "learning_rate": 8.458085156884791e-06, | |
| "loss": 0.9595, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.7907801418439715, | |
| "grad_norm": 0.36728196100689803, | |
| "learning_rate": 8.41730968241389e-06, | |
| "loss": 0.9542, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.7943262411347518, | |
| "grad_norm": 0.32868509859724265, | |
| "learning_rate": 8.376561180726115e-06, | |
| "loss": 0.9237, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.797872340425532, | |
| "grad_norm": 0.33553779719082805, | |
| "learning_rate": 8.33584034627224e-06, | |
| "loss": 0.9668, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.8014184397163122, | |
| "grad_norm": 0.3592656530700375, | |
| "learning_rate": 8.295147873031502e-06, | |
| "loss": 0.9687, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.8049645390070923, | |
| "grad_norm": 0.3330489518869409, | |
| "learning_rate": 8.254484454499817e-06, | |
| "loss": 0.9594, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.8085106382978724, | |
| "grad_norm": 0.37275609779719715, | |
| "learning_rate": 8.213850783677926e-06, | |
| "loss": 0.9727, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.8120567375886525, | |
| "grad_norm": 0.35997150953099766, | |
| "learning_rate": 8.1732475530596e-06, | |
| "loss": 0.9343, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.8156028368794326, | |
| "grad_norm": 0.3382875860314504, | |
| "learning_rate": 8.132675454619846e-06, | |
| "loss": 0.9812, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.8191489361702127, | |
| "grad_norm": 0.38581953129694097, | |
| "learning_rate": 8.092135179803091e-06, | |
| "loss": 0.92, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.8226950354609928, | |
| "grad_norm": 0.38913155301147206, | |
| "learning_rate": 8.051627419511429e-06, | |
| "loss": 0.9934, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.826241134751773, | |
| "grad_norm": 0.37743270636171417, | |
| "learning_rate": 8.011152864092812e-06, | |
| "loss": 0.9969, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.8297872340425532, | |
| "grad_norm": 0.33196718140374704, | |
| "learning_rate": 7.97071220332932e-06, | |
| "loss": 0.9646, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.8333333333333335, | |
| "grad_norm": 0.3618820601707212, | |
| "learning_rate": 7.930306126425377e-06, | |
| "loss": 0.911, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.8368794326241136, | |
| "grad_norm": 0.36679621653721584, | |
| "learning_rate": 7.889935321996014e-06, | |
| "loss": 0.9445, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.8404255319148937, | |
| "grad_norm": 0.3667701675004188, | |
| "learning_rate": 7.849600478055145e-06, | |
| "loss": 0.9695, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.8439716312056738, | |
| "grad_norm": 0.34841171823590084, | |
| "learning_rate": 7.809302282003823e-06, | |
| "loss": 0.934, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.8475177304964538, | |
| "grad_norm": 0.35865381928135853, | |
| "learning_rate": 7.769041420618545e-06, | |
| "loss": 0.9229, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.851063829787234, | |
| "grad_norm": 0.3606496256848997, | |
| "learning_rate": 7.72881858003953e-06, | |
| "loss": 0.9691, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.854609929078014, | |
| "grad_norm": 0.34646840430668596, | |
| "learning_rate": 7.688634445759026e-06, | |
| "loss": 0.9578, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.8581560283687943, | |
| "grad_norm": 0.3619989219136004, | |
| "learning_rate": 7.648489702609653e-06, | |
| "loss": 0.9504, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.8617021276595744, | |
| "grad_norm": 0.36867918774777586, | |
| "learning_rate": 7.608385034752696e-06, | |
| "loss": 0.9504, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.8652482269503547, | |
| "grad_norm": 0.35376310088793333, | |
| "learning_rate": 7.56832112566647e-06, | |
| "loss": 0.9848, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.8687943262411348, | |
| "grad_norm": 0.3862728350589978, | |
| "learning_rate": 7.528298658134658e-06, | |
| "loss": 0.9575, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.872340425531915, | |
| "grad_norm": 0.322531140818402, | |
| "learning_rate": 7.48831831423469e-06, | |
| "loss": 0.9162, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.875886524822695, | |
| "grad_norm": 0.37085381420014846, | |
| "learning_rate": 7.448380775326108e-06, | |
| "loss": 0.9554, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.8794326241134751, | |
| "grad_norm": 0.35655546944032424, | |
| "learning_rate": 7.408486722038943e-06, | |
| "loss": 0.9625, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.8829787234042552, | |
| "grad_norm": 0.3178457155060676, | |
| "learning_rate": 7.3686368342621496e-06, | |
| "loss": 0.9307, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.8865248226950353, | |
| "grad_norm": 0.3406313695465732, | |
| "learning_rate": 7.32883179113198e-06, | |
| "loss": 0.9651, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.8900709219858156, | |
| "grad_norm": 0.346901645086471, | |
| "learning_rate": 7.289072271020434e-06, | |
| "loss": 0.934, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.8936170212765957, | |
| "grad_norm": 0.3663202173018621, | |
| "learning_rate": 7.249358951523693e-06, | |
| "loss": 0.9966, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.897163120567376, | |
| "grad_norm": 0.33452988005856144, | |
| "learning_rate": 7.2096925094505695e-06, | |
| "loss": 0.9684, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.900709219858156, | |
| "grad_norm": 0.3416578941577139, | |
| "learning_rate": 7.170073620810975e-06, | |
| "loss": 0.963, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.9042553191489362, | |
| "grad_norm": 0.34915133717799923, | |
| "learning_rate": 7.130502960804397e-06, | |
| "loss": 0.9705, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.9078014184397163, | |
| "grad_norm": 0.3873543770637985, | |
| "learning_rate": 7.090981203808387e-06, | |
| "loss": 0.9533, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.9113475177304964, | |
| "grad_norm": 0.35578865044419283, | |
| "learning_rate": 7.051509023367087e-06, | |
| "loss": 0.9742, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.9148936170212765, | |
| "grad_norm": 0.38862412698080284, | |
| "learning_rate": 7.012087092179725e-06, | |
| "loss": 0.9822, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.9184397163120568, | |
| "grad_norm": 0.35817528542327143, | |
| "learning_rate": 6.972716082089173e-06, | |
| "loss": 0.9672, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.9219858156028369, | |
| "grad_norm": 0.33008334862633676, | |
| "learning_rate": 6.93339666407048e-06, | |
| "loss": 0.9608, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.925531914893617, | |
| "grad_norm": 0.3338787358040796, | |
| "learning_rate": 6.894129508219444e-06, | |
| "loss": 0.9571, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.9290780141843973, | |
| "grad_norm": 0.3505646347148005, | |
| "learning_rate": 6.854915283741198e-06, | |
| "loss": 0.9578, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.9326241134751774, | |
| "grad_norm": 0.33261145396470737, | |
| "learning_rate": 6.81575465893879e-06, | |
| "loss": 0.9647, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.9361702127659575, | |
| "grad_norm": 0.3226217535211914, | |
| "learning_rate": 6.776648301201817e-06, | |
| "loss": 0.9398, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.9397163120567376, | |
| "grad_norm": 0.3278245419317454, | |
| "learning_rate": 6.7375968769950255e-06, | |
| "loss": 0.9135, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.9432624113475176, | |
| "grad_norm": 0.3343650792150059, | |
| "learning_rate": 6.698601051846961e-06, | |
| "loss": 0.947, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.9468085106382977, | |
| "grad_norm": 0.32170493330780797, | |
| "learning_rate": 6.659661490338648e-06, | |
| "loss": 0.9202, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.950354609929078, | |
| "grad_norm": 0.32984166937379644, | |
| "learning_rate": 6.620778856092227e-06, | |
| "loss": 0.9876, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.9539007092198581, | |
| "grad_norm": 0.3205825135290648, | |
| "learning_rate": 6.5819538117596735e-06, | |
| "loss": 0.9587, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.9574468085106385, | |
| "grad_norm": 0.3239884548225763, | |
| "learning_rate": 6.5431870190114925e-06, | |
| "loss": 0.9502, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.9609929078014185, | |
| "grad_norm": 0.32314508912258516, | |
| "learning_rate": 6.504479138525437e-06, | |
| "loss": 0.9579, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.9645390070921986, | |
| "grad_norm": 0.3295935728159571, | |
| "learning_rate": 6.465830829975273e-06, | |
| "loss": 0.9448, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.9680851063829787, | |
| "grad_norm": 0.36994353060133206, | |
| "learning_rate": 6.427242752019499e-06, | |
| "loss": 0.9572, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.9716312056737588, | |
| "grad_norm": 0.3574700453216689, | |
| "learning_rate": 6.388715562290163e-06, | |
| "loss": 0.9871, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.975177304964539, | |
| "grad_norm": 0.313649405440105, | |
| "learning_rate": 6.350249917381619e-06, | |
| "loss": 0.9548, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.978723404255319, | |
| "grad_norm": 0.31820131189414436, | |
| "learning_rate": 6.3118464728393515e-06, | |
| "loss": 0.9808, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.9822695035460993, | |
| "grad_norm": 0.3457642013148487, | |
| "learning_rate": 6.273505883148815e-06, | |
| "loss": 0.9583, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.9858156028368794, | |
| "grad_norm": 0.33267823822326276, | |
| "learning_rate": 6.235228801724254e-06, | |
| "loss": 0.9509, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.9893617021276597, | |
| "grad_norm": 0.3497300533666417, | |
| "learning_rate": 6.197015880897599e-06, | |
| "loss": 0.9602, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.9929078014184398, | |
| "grad_norm": 0.34360079386079867, | |
| "learning_rate": 6.158867771907319e-06, | |
| "loss": 0.9472, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.99645390070922, | |
| "grad_norm": 0.32442906842984515, | |
| "learning_rate": 6.120785124887339e-06, | |
| "loss": 0.9132, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.34622104170979245, | |
| "learning_rate": 6.082768588855963e-06, | |
| "loss": 0.9596, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.00354609929078, | |
| "grad_norm": 0.4566911346970862, | |
| "learning_rate": 6.0448188117048e-06, | |
| "loss": 0.8959, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.00709219858156, | |
| "grad_norm": 0.3617148784050208, | |
| "learning_rate": 6.006936440187735e-06, | |
| "loss": 0.9563, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.0106382978723403, | |
| "grad_norm": 0.3681452353035544, | |
| "learning_rate": 5.9691221199099e-06, | |
| "loss": 0.9183, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.0141843971631204, | |
| "grad_norm": 0.3386502623032431, | |
| "learning_rate": 5.93137649531667e-06, | |
| "loss": 0.9249, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.017730496453901, | |
| "grad_norm": 0.34223363446968597, | |
| "learning_rate": 5.893700209682691e-06, | |
| "loss": 0.9212, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.021276595744681, | |
| "grad_norm": 0.3918409012245659, | |
| "learning_rate": 5.856093905100899e-06, | |
| "loss": 0.8968, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.024822695035461, | |
| "grad_norm": 0.3365985447006349, | |
| "learning_rate": 5.818558222471603e-06, | |
| "loss": 0.9246, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.028368794326241, | |
| "grad_norm": 0.3433680975231668, | |
| "learning_rate": 5.7810938014915284e-06, | |
| "loss": 0.9053, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.0319148936170213, | |
| "grad_norm": 0.3607462051926254, | |
| "learning_rate": 5.74370128064294e-06, | |
| "loss": 0.9063, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.0354609929078014, | |
| "grad_norm": 0.3394654339420203, | |
| "learning_rate": 5.7063812971827655e-06, | |
| "loss": 0.9299, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.0390070921985815, | |
| "grad_norm": 0.32380633576578366, | |
| "learning_rate": 5.669134487131707e-06, | |
| "loss": 0.9036, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.0425531914893615, | |
| "grad_norm": 0.3799496700402083, | |
| "learning_rate": 5.631961485263436e-06, | |
| "loss": 0.9188, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.0460992907801416, | |
| "grad_norm": 0.36059710867622713, | |
| "learning_rate": 5.594862925093745e-06, | |
| "loss": 0.9567, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.049645390070922, | |
| "grad_norm": 0.31267668524991454, | |
| "learning_rate": 5.557839438869764e-06, | |
| "loss": 0.8937, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.0531914893617023, | |
| "grad_norm": 0.34523656875027586, | |
| "learning_rate": 5.520891657559197e-06, | |
| "loss": 0.9055, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.0567375886524824, | |
| "grad_norm": 0.3428830226809694, | |
| "learning_rate": 5.484020210839547e-06, | |
| "loss": 0.9121, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.0602836879432624, | |
| "grad_norm": 0.34407502937578527, | |
| "learning_rate": 5.447225727087399e-06, | |
| "loss": 0.8889, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.0638297872340425, | |
| "grad_norm": 0.3639918638016257, | |
| "learning_rate": 5.410508833367709e-06, | |
| "loss": 0.9199, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.0673758865248226, | |
| "grad_norm": 0.33877778305397416, | |
| "learning_rate": 5.373870155423106e-06, | |
| "loss": 0.9459, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.0709219858156027, | |
| "grad_norm": 0.31092698823821685, | |
| "learning_rate": 5.337310317663255e-06, | |
| "loss": 0.9379, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.074468085106383, | |
| "grad_norm": 0.44030677058791035, | |
| "learning_rate": 5.300829943154181e-06, | |
| "loss": 0.8792, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.078014184397163, | |
| "grad_norm": 0.32870413268141346, | |
| "learning_rate": 5.264429653607675e-06, | |
| "loss": 0.9146, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.0815602836879434, | |
| "grad_norm": 0.406809678755843, | |
| "learning_rate": 5.228110069370689e-06, | |
| "loss": 0.9238, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.0851063829787235, | |
| "grad_norm": 0.3298950250685358, | |
| "learning_rate": 5.191871809414763e-06, | |
| "loss": 0.9119, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.0886524822695036, | |
| "grad_norm": 0.326775177491553, | |
| "learning_rate": 5.155715491325493e-06, | |
| "loss": 0.9088, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.0921985815602837, | |
| "grad_norm": 0.33745417440187936, | |
| "learning_rate": 5.119641731291971e-06, | |
| "loss": 0.9006, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.095744680851064, | |
| "grad_norm": 0.33773499121718675, | |
| "learning_rate": 5.083651144096323e-06, | |
| "loss": 0.9447, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.099290780141844, | |
| "grad_norm": 0.346227680847854, | |
| "learning_rate": 5.047744343103203e-06, | |
| "loss": 0.8952, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.102836879432624, | |
| "grad_norm": 0.3416473020704537, | |
| "learning_rate": 5.011921940249349e-06, | |
| "loss": 0.9094, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.106382978723404, | |
| "grad_norm": 0.3345099331995336, | |
| "learning_rate": 4.976184546033167e-06, | |
| "loss": 0.9292, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.1099290780141846, | |
| "grad_norm": 0.3373034338886128, | |
| "learning_rate": 4.940532769504297e-06, | |
| "loss": 0.9096, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.1134751773049647, | |
| "grad_norm": 0.3429936434310372, | |
| "learning_rate": 4.904967218253271e-06, | |
| "loss": 0.9183, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.117021276595745, | |
| "grad_norm": 0.3173720112615683, | |
| "learning_rate": 4.869488498401126e-06, | |
| "loss": 0.8964, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.120567375886525, | |
| "grad_norm": 0.35213901124678976, | |
| "learning_rate": 4.834097214589082e-06, | |
| "loss": 0.9403, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.124113475177305, | |
| "grad_norm": 0.3493639815881956, | |
| "learning_rate": 4.7987939699682605e-06, | |
| "loss": 0.9279, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.127659574468085, | |
| "grad_norm": 0.34103344147668196, | |
| "learning_rate": 4.763579366189367e-06, | |
| "loss": 0.9153, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.131205673758865, | |
| "grad_norm": 0.33549617227086487, | |
| "learning_rate": 4.728454003392475e-06, | |
| "loss": 0.9226, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.1347517730496453, | |
| "grad_norm": 0.32714622326432746, | |
| "learning_rate": 4.69341848019677e-06, | |
| "loss": 0.9311, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.1382978723404253, | |
| "grad_norm": 0.3311453632580336, | |
| "learning_rate": 4.658473393690359e-06, | |
| "loss": 0.9084, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.141843971631206, | |
| "grad_norm": 0.38675624940083536, | |
| "learning_rate": 4.623619339420098e-06, | |
| "loss": 0.8874, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.145390070921986, | |
| "grad_norm": 0.3279868665844946, | |
| "learning_rate": 4.588856911381433e-06, | |
| "loss": 0.9217, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.148936170212766, | |
| "grad_norm": 0.3217769678798751, | |
| "learning_rate": 4.554186702008292e-06, | |
| "loss": 0.931, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.152482269503546, | |
| "grad_norm": 0.4389700818952468, | |
| "learning_rate": 4.519609302162972e-06, | |
| "loss": 0.9032, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.1560283687943262, | |
| "grad_norm": 0.3658714505599075, | |
| "learning_rate": 4.485125301126072e-06, | |
| "loss": 0.9136, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.1595744680851063, | |
| "grad_norm": 0.35358017573741335, | |
| "learning_rate": 4.4507352865864686e-06, | |
| "loss": 0.9179, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.1631205673758864, | |
| "grad_norm": 0.32146202938676643, | |
| "learning_rate": 4.416439844631271e-06, | |
| "loss": 0.9296, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.1666666666666665, | |
| "grad_norm": 0.31895440604586967, | |
| "learning_rate": 4.38223955973586e-06, | |
| "loss": 0.901, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.1702127659574466, | |
| "grad_norm": 0.6707694969413162, | |
| "learning_rate": 4.348135014753906e-06, | |
| "loss": 0.8751, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.173758865248227, | |
| "grad_norm": 0.3545708295368673, | |
| "learning_rate": 4.314126790907455e-06, | |
| "loss": 0.9363, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.1773049645390072, | |
| "grad_norm": 0.32363120553664343, | |
| "learning_rate": 4.280215467777005e-06, | |
| "loss": 0.9259, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.1808510638297873, | |
| "grad_norm": 0.34309179301070913, | |
| "learning_rate": 4.246401623291634e-06, | |
| "loss": 0.8897, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.1843971631205674, | |
| "grad_norm": 0.3453687929831591, | |
| "learning_rate": 4.2126858337191675e-06, | |
| "loss": 0.9135, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.1879432624113475, | |
| "grad_norm": 0.43729618573609375, | |
| "learning_rate": 4.179068673656332e-06, | |
| "loss": 0.9041, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.1914893617021276, | |
| "grad_norm": 0.3202082582840827, | |
| "learning_rate": 4.145550716018974e-06, | |
| "loss": 0.9135, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.1950354609929077, | |
| "grad_norm": 0.3246145160502331, | |
| "learning_rate": 4.112132532032302e-06, | |
| "loss": 0.8856, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.198581560283688, | |
| "grad_norm": 0.3440105242186509, | |
| "learning_rate": 4.078814691221139e-06, | |
| "loss": 0.958, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.202127659574468, | |
| "grad_norm": 0.32716180743353473, | |
| "learning_rate": 4.045597761400234e-06, | |
| "loss": 0.9002, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.2056737588652484, | |
| "grad_norm": 0.32188979652918004, | |
| "learning_rate": 4.01248230866456e-06, | |
| "loss": 0.9512, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.2092198581560285, | |
| "grad_norm": 0.32072445524392357, | |
| "learning_rate": 3.979468897379697e-06, | |
| "loss": 0.9526, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.2127659574468086, | |
| "grad_norm": 0.3387410378590688, | |
| "learning_rate": 3.946558090172182e-06, | |
| "loss": 0.9277, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.2163120567375887, | |
| "grad_norm": 0.3196709727101075, | |
| "learning_rate": 3.9137504479199405e-06, | |
| "loss": 0.8677, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.219858156028369, | |
| "grad_norm": 0.3255137199624219, | |
| "learning_rate": 3.881046529742734e-06, | |
| "loss": 0.9138, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.223404255319149, | |
| "grad_norm": 0.30874496281243, | |
| "learning_rate": 3.848446892992604e-06, | |
| "loss": 0.9102, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.226950354609929, | |
| "grad_norm": 0.5051339031173951, | |
| "learning_rate": 3.815952093244405e-06, | |
| "loss": 0.9271, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.230496453900709, | |
| "grad_norm": 0.3176852007190203, | |
| "learning_rate": 3.7835626842863105e-06, | |
| "loss": 0.9647, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.2340425531914896, | |
| "grad_norm": 0.3220485429110355, | |
| "learning_rate": 3.7512792181103872e-06, | |
| "loss": 0.9188, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.2375886524822697, | |
| "grad_norm": 0.3441318897854401, | |
| "learning_rate": 3.7191022449031965e-06, | |
| "loss": 0.9265, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.2411347517730498, | |
| "grad_norm": 0.3141082795259724, | |
| "learning_rate": 3.6870323130363972e-06, | |
| "loss": 0.9366, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.24468085106383, | |
| "grad_norm": 0.36501665747304923, | |
| "learning_rate": 3.6550699690574133e-06, | |
| "loss": 0.8975, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.24822695035461, | |
| "grad_norm": 0.3067756636590056, | |
| "learning_rate": 3.623215757680113e-06, | |
| "loss": 0.9314, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.25177304964539, | |
| "grad_norm": 0.325736875227407, | |
| "learning_rate": 3.591470221775544e-06, | |
| "loss": 0.8838, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.25531914893617, | |
| "grad_norm": 0.3363855691116925, | |
| "learning_rate": 3.5598339023626514e-06, | |
| "loss": 0.9203, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.2588652482269502, | |
| "grad_norm": 0.3201591221800834, | |
| "learning_rate": 3.5283073385990762e-06, | |
| "loss": 0.8914, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.2624113475177303, | |
| "grad_norm": 0.3057573202166175, | |
| "learning_rate": 3.4968910677719746e-06, | |
| "loss": 0.8962, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.2659574468085104, | |
| "grad_norm": 0.31394171796231907, | |
| "learning_rate": 3.4655856252888376e-06, | |
| "loss": 0.8974, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.269503546099291, | |
| "grad_norm": 0.3251813869113006, | |
| "learning_rate": 3.434391544668383e-06, | |
| "loss": 0.9322, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.273049645390071, | |
| "grad_norm": 0.3315408177538822, | |
| "learning_rate": 3.4033093575314645e-06, | |
| "loss": 0.9369, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.276595744680851, | |
| "grad_norm": 0.3241398323929304, | |
| "learning_rate": 3.372339593591998e-06, | |
| "loss": 0.8951, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.280141843971631, | |
| "grad_norm": 0.3222392701358019, | |
| "learning_rate": 3.341482780647952e-06, | |
| "loss": 0.9324, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.2836879432624113, | |
| "grad_norm": 0.3078014669602889, | |
| "learning_rate": 3.310739444572336e-06, | |
| "loss": 0.9195, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.2872340425531914, | |
| "grad_norm": 0.30721991786253994, | |
| "learning_rate": 3.2801101093042444e-06, | |
| "loss": 0.9166, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.2907801418439715, | |
| "grad_norm": 0.32609727890595375, | |
| "learning_rate": 3.249595296839937e-06, | |
| "loss": 0.9395, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.2943262411347516, | |
| "grad_norm": 0.31254234076499143, | |
| "learning_rate": 3.219195527223923e-06, | |
| "loss": 0.9041, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.297872340425532, | |
| "grad_norm": 0.3176629795843061, | |
| "learning_rate": 3.1889113185401223e-06, | |
| "loss": 0.9306, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.301418439716312, | |
| "grad_norm": 0.30783904152042746, | |
| "learning_rate": 3.1587431869030126e-06, | |
| "loss": 0.9353, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.3049645390070923, | |
| "grad_norm": 0.30800111746280723, | |
| "learning_rate": 3.1286916464488506e-06, | |
| "loss": 0.8775, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.3085106382978724, | |
| "grad_norm": 0.3137322351937391, | |
| "learning_rate": 3.098757209326898e-06, | |
| "loss": 0.9063, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.3120567375886525, | |
| "grad_norm": 0.31125500054596683, | |
| "learning_rate": 3.068940385690704e-06, | |
| "loss": 0.8757, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.3156028368794326, | |
| "grad_norm": 0.3120079580710984, | |
| "learning_rate": 3.0392416836894066e-06, | |
| "loss": 0.855, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.3191489361702127, | |
| "grad_norm": 0.31258285093042815, | |
| "learning_rate": 3.0096616094590712e-06, | |
| "loss": 0.8695, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.3226950354609928, | |
| "grad_norm": 0.31805496506154607, | |
| "learning_rate": 2.9802006671140583e-06, | |
| "loss": 0.9319, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.326241134751773, | |
| "grad_norm": 0.3270645593794086, | |
| "learning_rate": 2.950859358738453e-06, | |
| "loss": 0.9082, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.329787234042553, | |
| "grad_norm": 0.3121962070049027, | |
| "learning_rate": 2.9216381843774812e-06, | |
| "loss": 0.9174, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.3333333333333335, | |
| "grad_norm": 0.32503615811718056, | |
| "learning_rate": 2.8925376420290153e-06, | |
| "loss": 0.9192, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.3368794326241136, | |
| "grad_norm": 0.3127522704871516, | |
| "learning_rate": 2.8635582276350606e-06, | |
| "loss": 0.9003, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.3404255319148937, | |
| "grad_norm": 0.3159719914352033, | |
| "learning_rate": 2.834700435073319e-06, | |
| "loss": 0.9108, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.3439716312056738, | |
| "grad_norm": 0.30569977271102494, | |
| "learning_rate": 2.8059647561487758e-06, | |
| "loss": 0.9276, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.347517730496454, | |
| "grad_norm": 0.33735915634007474, | |
| "learning_rate": 2.777351680585302e-06, | |
| "loss": 0.9223, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.351063829787234, | |
| "grad_norm": 0.3168581351513594, | |
| "learning_rate": 2.7488616960173265e-06, | |
| "loss": 0.9474, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.354609929078014, | |
| "grad_norm": 0.3260808135927312, | |
| "learning_rate": 2.7204952879815096e-06, | |
| "loss": 0.896, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.3581560283687946, | |
| "grad_norm": 0.3277475079557903, | |
| "learning_rate": 2.6922529399084773e-06, | |
| "loss": 0.909, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.3617021276595747, | |
| "grad_norm": 0.30002466874093064, | |
| "learning_rate": 2.6641351331145827e-06, | |
| "loss": 0.9368, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.3652482269503547, | |
| "grad_norm": 0.31282975390328294, | |
| "learning_rate": 2.6361423467936954e-06, | |
| "loss": 0.9352, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.368794326241135, | |
| "grad_norm": 0.3141556521218453, | |
| "learning_rate": 2.608275058009053e-06, | |
| "loss": 0.9102, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.372340425531915, | |
| "grad_norm": 0.29970361864896106, | |
| "learning_rate": 2.580533741685106e-06, | |
| "loss": 0.9079, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.375886524822695, | |
| "grad_norm": 0.32369551859579393, | |
| "learning_rate": 2.55291887059944e-06, | |
| "loss": 0.9367, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.379432624113475, | |
| "grad_norm": 0.3201091360816704, | |
| "learning_rate": 2.52543091537472e-06, | |
| "loss": 0.9146, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.382978723404255, | |
| "grad_norm": 0.3014262832146507, | |
| "learning_rate": 2.4980703444706567e-06, | |
| "loss": 0.8984, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.3865248226950353, | |
| "grad_norm": 0.30972681757749293, | |
| "learning_rate": 2.4708376241760412e-06, | |
| "loss": 0.9084, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.3900709219858154, | |
| "grad_norm": 0.3086157439660352, | |
| "learning_rate": 2.4437332186007768e-06, | |
| "loss": 0.9541, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.393617021276596, | |
| "grad_norm": 0.3278071415194472, | |
| "learning_rate": 2.416757589667985e-06, | |
| "loss": 0.9274, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.397163120567376, | |
| "grad_norm": 0.3064069385087261, | |
| "learning_rate": 2.3899111971061362e-06, | |
| "loss": 0.9118, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.400709219858156, | |
| "grad_norm": 0.31634968287824605, | |
| "learning_rate": 2.3631944984411947e-06, | |
| "loss": 0.9175, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.404255319148936, | |
| "grad_norm": 0.31946966423089546, | |
| "learning_rate": 2.3366079489888473e-06, | |
| "loss": 0.9104, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.4078014184397163, | |
| "grad_norm": 0.3073126775907417, | |
| "learning_rate": 2.3101520018467214e-06, | |
| "loss": 0.875, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.4113475177304964, | |
| "grad_norm": 0.36537378058022496, | |
| "learning_rate": 2.2838271078866714e-06, | |
| "loss": 0.9268, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.4148936170212765, | |
| "grad_norm": 0.3106854060545735, | |
| "learning_rate": 2.2576337157471083e-06, | |
| "loss": 0.9233, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.4184397163120566, | |
| "grad_norm": 0.3055447112943066, | |
| "learning_rate": 2.2315722718253276e-06, | |
| "loss": 0.9249, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.421985815602837, | |
| "grad_norm": 1.3811799608320727, | |
| "learning_rate": 2.205643220269921e-06, | |
| "loss": 0.9085, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.425531914893617, | |
| "grad_norm": 0.3002261018646463, | |
| "learning_rate": 2.179847002973201e-06, | |
| "loss": 0.9091, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.4290780141843973, | |
| "grad_norm": 0.30674947932499097, | |
| "learning_rate": 2.1541840595636678e-06, | |
| "loss": 0.9179, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.4326241134751774, | |
| "grad_norm": 0.3278506911887618, | |
| "learning_rate": 2.1286548273985288e-06, | |
| "loss": 0.8875, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.4361702127659575, | |
| "grad_norm": 0.3334426883441487, | |
| "learning_rate": 2.1032597415562218e-06, | |
| "loss": 0.9133, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.4397163120567376, | |
| "grad_norm": 0.3088493635408435, | |
| "learning_rate": 2.077999234829029e-06, | |
| "loss": 0.9126, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.4432624113475176, | |
| "grad_norm": 0.30877224814331133, | |
| "learning_rate": 2.052873737715675e-06, | |
| "loss": 0.9509, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.4468085106382977, | |
| "grad_norm": 0.3178316512514858, | |
| "learning_rate": 2.0278836784140043e-06, | |
| "loss": 0.9185, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.450354609929078, | |
| "grad_norm": 0.3187218318871324, | |
| "learning_rate": 2.0030294828136867e-06, | |
| "loss": 0.9007, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.453900709219858, | |
| "grad_norm": 0.3894554824207546, | |
| "learning_rate": 1.9783115744889447e-06, | |
| "loss": 0.9312, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.4574468085106385, | |
| "grad_norm": 0.29941537548459557, | |
| "learning_rate": 1.953730374691353e-06, | |
| "loss": 0.9449, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.4609929078014185, | |
| "grad_norm": 0.32251616671371774, | |
| "learning_rate": 1.9292863023426413e-06, | |
| "loss": 0.8896, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.4645390070921986, | |
| "grad_norm": 0.3257419849968331, | |
| "learning_rate": 1.9049797740275655e-06, | |
| "loss": 0.9188, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.4680851063829787, | |
| "grad_norm": 0.2981120109547122, | |
| "learning_rate": 1.8808112039868143e-06, | |
| "loss": 0.8842, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.471631205673759, | |
| "grad_norm": 0.3543784363301713, | |
| "learning_rate": 1.856781004109931e-06, | |
| "loss": 0.9478, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.475177304964539, | |
| "grad_norm": 0.293436787335392, | |
| "learning_rate": 1.832889583928308e-06, | |
| "loss": 0.9176, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.478723404255319, | |
| "grad_norm": 0.30922135296942016, | |
| "learning_rate": 1.8091373506082033e-06, | |
| "loss": 0.9176, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.482269503546099, | |
| "grad_norm": 0.3244973967157742, | |
| "learning_rate": 1.7855247089438022e-06, | |
| "loss": 0.9226, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.4858156028368796, | |
| "grad_norm": 0.30699412163526396, | |
| "learning_rate": 1.7620520613503223e-06, | |
| "loss": 0.9028, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.4893617021276597, | |
| "grad_norm": 0.31940538100442684, | |
| "learning_rate": 1.7387198078571433e-06, | |
| "loss": 0.9242, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.49290780141844, | |
| "grad_norm": 0.3040599236363412, | |
| "learning_rate": 1.7155283461010064e-06, | |
| "loss": 0.9349, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.49645390070922, | |
| "grad_norm": 0.3008914360277951, | |
| "learning_rate": 1.6924780713192235e-06, | |
| "loss": 0.9238, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.3047659130967524, | |
| "learning_rate": 1.6695693763429433e-06, | |
| "loss": 0.9035, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.50354609929078, | |
| "grad_norm": 0.33760543099149587, | |
| "learning_rate": 1.6468026515904712e-06, | |
| "loss": 0.9369, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.50709219858156, | |
| "grad_norm": 0.3019407982427647, | |
| "learning_rate": 1.6241782850605925e-06, | |
| "loss": 0.8842, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.5106382978723403, | |
| "grad_norm": 0.31360360735621046, | |
| "learning_rate": 1.6016966623259821e-06, | |
| "loss": 0.9079, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.5141843971631204, | |
| "grad_norm": 0.31362601497035747, | |
| "learning_rate": 1.5793581665266145e-06, | |
| "loss": 0.8965, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.5177304964539005, | |
| "grad_norm": 0.29493198915188423, | |
| "learning_rate": 1.5571631783632513e-06, | |
| "loss": 0.8966, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.521276595744681, | |
| "grad_norm": 0.3020594965557035, | |
| "learning_rate": 1.5351120760909388e-06, | |
| "loss": 0.8975, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.524822695035461, | |
| "grad_norm": 0.31339820524414264, | |
| "learning_rate": 1.5132052355125693e-06, | |
| "loss": 0.9028, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.528368794326241, | |
| "grad_norm": 0.2972691263629277, | |
| "learning_rate": 1.4914430299724747e-06, | |
| "loss": 0.9754, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.5319148936170213, | |
| "grad_norm": 0.29873853790696653, | |
| "learning_rate": 1.4698258303500624e-06, | |
| "loss": 0.9313, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.5354609929078014, | |
| "grad_norm": 0.3010289927697155, | |
| "learning_rate": 1.4483540050535038e-06, | |
| "loss": 0.9079, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.5390070921985815, | |
| "grad_norm": 0.30300125772170716, | |
| "learning_rate": 1.4270279200134406e-06, | |
| "loss": 0.8838, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.5425531914893615, | |
| "grad_norm": 0.32295056893197593, | |
| "learning_rate": 1.4058479386767554e-06, | |
| "loss": 0.9027, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.546099290780142, | |
| "grad_norm": 0.31767282991785056, | |
| "learning_rate": 1.3848144220003867e-06, | |
| "loss": 0.9429, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.549645390070922, | |
| "grad_norm": 0.3112289718032724, | |
| "learning_rate": 1.363927728445159e-06, | |
| "loss": 0.8892, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.5531914893617023, | |
| "grad_norm": 0.2995342176385735, | |
| "learning_rate": 1.3431882139696916e-06, | |
| "loss": 0.8905, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.5567375886524824, | |
| "grad_norm": 0.3032745086759523, | |
| "learning_rate": 1.3225962320243225e-06, | |
| "loss": 0.886, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.5602836879432624, | |
| "grad_norm": 0.30710485752267236, | |
| "learning_rate": 1.3021521335450815e-06, | |
| "loss": 0.8881, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.5638297872340425, | |
| "grad_norm": 0.2999497506890768, | |
| "learning_rate": 1.281856266947723e-06, | |
| "loss": 0.9114, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.5673758865248226, | |
| "grad_norm": 0.30349869158909365, | |
| "learning_rate": 1.261708978121773e-06, | |
| "loss": 0.9228, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.5709219858156027, | |
| "grad_norm": 0.30950924252855117, | |
| "learning_rate": 1.241710610424647e-06, | |
| "loss": 0.9028, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.574468085106383, | |
| "grad_norm": 0.30623095850995313, | |
| "learning_rate": 1.2218615046757877e-06, | |
| "loss": 0.9086, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.578014184397163, | |
| "grad_norm": 0.29773890784186613, | |
| "learning_rate": 1.2021619991508614e-06, | |
| "loss": 0.8969, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.581560283687943, | |
| "grad_norm": 0.2966683063241434, | |
| "learning_rate": 1.182612429575999e-06, | |
| "loss": 0.913, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.5851063829787235, | |
| "grad_norm": 0.4233006249369692, | |
| "learning_rate": 1.1632131291220628e-06, | |
| "loss": 0.9152, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.5886524822695036, | |
| "grad_norm": 0.3061580358258722, | |
| "learning_rate": 1.1439644283989749e-06, | |
| "loss": 0.925, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.5921985815602837, | |
| "grad_norm": 0.3034994086209508, | |
| "learning_rate": 1.1248666554500831e-06, | |
| "loss": 0.9038, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.595744680851064, | |
| "grad_norm": 0.3022611564679423, | |
| "learning_rate": 1.1059201357465687e-06, | |
| "loss": 0.9354, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.599290780141844, | |
| "grad_norm": 0.29847498771161884, | |
| "learning_rate": 1.0871251921819027e-06, | |
| "loss": 0.9059, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.602836879432624, | |
| "grad_norm": 0.2819425119692849, | |
| "learning_rate": 1.0684821450663364e-06, | |
| "loss": 0.9686, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.6063829787234045, | |
| "grad_norm": 0.28132632342568453, | |
| "learning_rate": 1.0499913121214511e-06, | |
| "loss": 0.9344, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.6099290780141846, | |
| "grad_norm": 0.3084807404737308, | |
| "learning_rate": 1.0316530084747344e-06, | |
| "loss": 0.933, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.6134751773049647, | |
| "grad_norm": 0.31243627686333275, | |
| "learning_rate": 1.0134675466542166e-06, | |
| "loss": 0.9304, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.617021276595745, | |
| "grad_norm": 0.2916454484641493, | |
| "learning_rate": 9.954352365831455e-07, | |
| "loss": 0.9209, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.620567375886525, | |
| "grad_norm": 0.31575534533705746, | |
| "learning_rate": 9.775563855746918e-07, | |
| "loss": 0.9391, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 2.624113475177305, | |
| "grad_norm": 0.307647066815103, | |
| "learning_rate": 9.59831298326731e-07, | |
| "loss": 0.8955, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 2.627659574468085, | |
| "grad_norm": 0.30851549554520924, | |
| "learning_rate": 9.422602769166367e-07, | |
| "loss": 0.9238, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 2.631205673758865, | |
| "grad_norm": 0.30972798605613994, | |
| "learning_rate": 9.248436207961331e-07, | |
| "loss": 0.8963, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 2.6347517730496453, | |
| "grad_norm": 0.3078284112186838, | |
| "learning_rate": 9.075816267862036e-07, | |
| "loss": 0.8868, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 2.6382978723404253, | |
| "grad_norm": 0.2988265475360356, | |
| "learning_rate": 8.904745890720146e-07, | |
| "loss": 0.9384, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 2.6418439716312054, | |
| "grad_norm": 0.298948231596996, | |
| "learning_rate": 8.735227991979178e-07, | |
| "loss": 0.8951, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 2.645390070921986, | |
| "grad_norm": 0.32214256840915684, | |
| "learning_rate": 8.567265460624697e-07, | |
| "loss": 0.9464, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 2.648936170212766, | |
| "grad_norm": 0.2929574896449302, | |
| "learning_rate": 8.400861159135143e-07, | |
| "loss": 0.907, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 2.652482269503546, | |
| "grad_norm": 0.31556050296308524, | |
| "learning_rate": 8.236017923433104e-07, | |
| "loss": 0.9504, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 2.6560283687943262, | |
| "grad_norm": 0.2905809683490354, | |
| "learning_rate": 8.072738562836801e-07, | |
| "loss": 0.8962, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 2.6595744680851063, | |
| "grad_norm": 0.29418627161215105, | |
| "learning_rate": 7.911025860012445e-07, | |
| "loss": 0.9155, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 2.6631205673758864, | |
| "grad_norm": 0.3007094116002144, | |
| "learning_rate": 7.750882570926621e-07, | |
| "loss": 0.8947, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.29163111870403013, | |
| "learning_rate": 7.592311424799404e-07, | |
| "loss": 0.886, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 2.670212765957447, | |
| "grad_norm": 0.2849545087615603, | |
| "learning_rate": 7.435315124057874e-07, | |
| "loss": 0.9413, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.673758865248227, | |
| "grad_norm": 0.2957391801845664, | |
| "learning_rate": 7.279896344289971e-07, | |
| "loss": 0.8786, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.6773049645390072, | |
| "grad_norm": 0.3001167850961704, | |
| "learning_rate": 7.12605773419901e-07, | |
| "loss": 0.9233, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 2.6808510638297873, | |
| "grad_norm": 0.31548360829669203, | |
| "learning_rate": 6.973801915558432e-07, | |
| "loss": 0.886, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 2.6843971631205674, | |
| "grad_norm": 0.2966682832596965, | |
| "learning_rate": 6.823131483167167e-07, | |
| "loss": 0.9072, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 2.6879432624113475, | |
| "grad_norm": 0.3047122286238573, | |
| "learning_rate": 6.674049004805483e-07, | |
| "loss": 0.8865, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 2.6914893617021276, | |
| "grad_norm": 0.3034622193668983, | |
| "learning_rate": 6.526557021191093e-07, | |
| "loss": 0.9262, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 2.6950354609929077, | |
| "grad_norm": 0.3055046353235313, | |
| "learning_rate": 6.38065804593595e-07, | |
| "loss": 0.9175, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.698581560283688, | |
| "grad_norm": 1.273630558495017, | |
| "learning_rate": 6.236354565503333e-07, | |
| "loss": 0.9167, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 2.702127659574468, | |
| "grad_norm": 0.3166786008903364, | |
| "learning_rate": 6.093649039165616e-07, | |
| "loss": 0.9213, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 2.705673758865248, | |
| "grad_norm": 0.297235641807391, | |
| "learning_rate": 5.95254389896217e-07, | |
| "loss": 0.9473, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 2.7092198581560285, | |
| "grad_norm": 0.293044710631368, | |
| "learning_rate": 5.813041549658027e-07, | |
| "loss": 0.939, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 2.7127659574468086, | |
| "grad_norm": 0.2933241514093948, | |
| "learning_rate": 5.675144368702901e-07, | |
| "loss": 0.9254, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 2.7163120567375887, | |
| "grad_norm": 0.31330364961383234, | |
| "learning_rate": 5.538854706190633e-07, | |
| "loss": 0.9573, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 2.719858156028369, | |
| "grad_norm": 0.31715255291186145, | |
| "learning_rate": 5.404174884819102e-07, | |
| "loss": 0.8879, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 2.723404255319149, | |
| "grad_norm": 2.600016967668859, | |
| "learning_rate": 5.271107199850777e-07, | |
| "loss": 0.9455, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 2.726950354609929, | |
| "grad_norm": 0.30313200742910756, | |
| "learning_rate": 5.139653919073451e-07, | |
| "loss": 0.9212, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 2.7304964539007095, | |
| "grad_norm": 0.29776498237206805, | |
| "learning_rate": 5.009817282761675e-07, | |
| "loss": 0.9094, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.7340425531914896, | |
| "grad_norm": 0.285529807459011, | |
| "learning_rate": 4.881599503638546e-07, | |
| "loss": 0.9218, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 2.7375886524822697, | |
| "grad_norm": 0.290727814060904, | |
| "learning_rate": 4.7550027668380127e-07, | |
| "loss": 0.94, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 2.7411347517730498, | |
| "grad_norm": 0.31720136641130625, | |
| "learning_rate": 4.6300292298676497e-07, | |
| "loss": 0.9137, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 2.74468085106383, | |
| "grad_norm": 0.3103862279410868, | |
| "learning_rate": 4.506681022571846e-07, | |
| "loss": 0.9136, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 2.74822695035461, | |
| "grad_norm": 0.28157134238787646, | |
| "learning_rate": 4.384960247095549e-07, | |
| "loss": 0.8961, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 2.75177304964539, | |
| "grad_norm": 0.28882298841598714, | |
| "learning_rate": 4.2648689778484243e-07, | |
| "loss": 0.9073, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 2.75531914893617, | |
| "grad_norm": 0.2879740747490477, | |
| "learning_rate": 4.146409261469486e-07, | |
| "loss": 0.9342, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 2.7588652482269502, | |
| "grad_norm": 0.3186520970581419, | |
| "learning_rate": 4.029583116792235e-07, | |
| "loss": 0.8772, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 2.7624113475177303, | |
| "grad_norm": 0.29547086736383116, | |
| "learning_rate": 3.914392534810241e-07, | |
| "loss": 0.9141, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 2.7659574468085104, | |
| "grad_norm": 0.2980358066392843, | |
| "learning_rate": 3.800839478643259e-07, | |
| "loss": 0.9228, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.7695035460992905, | |
| "grad_norm": 0.28384720444289635, | |
| "learning_rate": 3.68892588350368e-07, | |
| "loss": 0.901, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 2.773049645390071, | |
| "grad_norm": 0.36764343853620207, | |
| "learning_rate": 3.578653656663655e-07, | |
| "loss": 0.9218, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 2.776595744680851, | |
| "grad_norm": 0.29961480527731943, | |
| "learning_rate": 3.4700246774225124e-07, | |
| "loss": 0.9248, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 2.780141843971631, | |
| "grad_norm": 0.29529547831757363, | |
| "learning_rate": 3.3630407970747457e-07, | |
| "loss": 0.9089, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 2.7836879432624113, | |
| "grad_norm": 0.2928645193950287, | |
| "learning_rate": 3.257703838878523e-07, | |
| "loss": 0.945, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 2.7872340425531914, | |
| "grad_norm": 0.3013558839362537, | |
| "learning_rate": 3.154015598024529e-07, | |
| "loss": 0.955, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 2.7907801418439715, | |
| "grad_norm": 0.3103783207215448, | |
| "learning_rate": 3.0519778416054023e-07, | |
| "loss": 0.8506, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 2.794326241134752, | |
| "grad_norm": 0.2821659794655092, | |
| "learning_rate": 2.951592308585671e-07, | |
| "loss": 0.9653, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 2.797872340425532, | |
| "grad_norm": 0.3057307542652081, | |
| "learning_rate": 2.8528607097719986e-07, | |
| "loss": 0.9494, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 2.801418439716312, | |
| "grad_norm": 0.30527993253257074, | |
| "learning_rate": 2.755784727784194e-07, | |
| "loss": 0.9221, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.8049645390070923, | |
| "grad_norm": 0.29700128829756606, | |
| "learning_rate": 2.6603660170263725e-07, | |
| "loss": 0.9206, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 2.8085106382978724, | |
| "grad_norm": 0.3190882543220009, | |
| "learning_rate": 2.5666062036588504e-07, | |
| "loss": 0.9236, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 2.8120567375886525, | |
| "grad_norm": 0.2930659555504179, | |
| "learning_rate": 2.474506885570416e-07, | |
| "loss": 0.9211, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 2.8156028368794326, | |
| "grad_norm": 0.29895327454226917, | |
| "learning_rate": 2.3840696323510826e-07, | |
| "loss": 0.909, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 2.8191489361702127, | |
| "grad_norm": 0.3005687202021333, | |
| "learning_rate": 2.2952959852653888e-07, | |
| "loss": 0.8934, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 2.8226950354609928, | |
| "grad_norm": 0.28501994772756833, | |
| "learning_rate": 2.208187457226052e-07, | |
| "loss": 0.9048, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 2.826241134751773, | |
| "grad_norm": 0.31036487157590603, | |
| "learning_rate": 2.1227455327682223e-07, | |
| "loss": 0.9531, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 2.829787234042553, | |
| "grad_norm": 0.28761242138340226, | |
| "learning_rate": 2.0389716680242368e-07, | |
| "loss": 0.9218, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 2.8333333333333335, | |
| "grad_norm": 0.30278424167169304, | |
| "learning_rate": 1.956867290698683e-07, | |
| "loss": 0.9244, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 2.8368794326241136, | |
| "grad_norm": 0.29268419217196073, | |
| "learning_rate": 1.8764338000442083e-07, | |
| "loss": 0.8955, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.8404255319148937, | |
| "grad_norm": 0.3006157318897833, | |
| "learning_rate": 1.7976725668375493e-07, | |
| "loss": 0.9066, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 2.8439716312056738, | |
| "grad_norm": 0.2989253069129778, | |
| "learning_rate": 1.7205849333562509e-07, | |
| "loss": 0.9521, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 2.847517730496454, | |
| "grad_norm": 0.2948078480861051, | |
| "learning_rate": 1.6451722133557613e-07, | |
| "loss": 0.9087, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 2.851063829787234, | |
| "grad_norm": 0.28509008741050834, | |
| "learning_rate": 1.5714356920470297e-07, | |
| "loss": 0.8979, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 2.854609929078014, | |
| "grad_norm": 0.2878937554326602, | |
| "learning_rate": 1.4993766260746333e-07, | |
| "loss": 0.9296, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 2.8581560283687946, | |
| "grad_norm": 0.3079617762356363, | |
| "learning_rate": 1.4289962434953508e-07, | |
| "loss": 0.8937, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 2.8617021276595747, | |
| "grad_norm": 0.28625196843148937, | |
| "learning_rate": 1.3602957437571896e-07, | |
| "loss": 0.9351, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 2.8652482269503547, | |
| "grad_norm": 0.2923705943500138, | |
| "learning_rate": 1.293276297679058e-07, | |
| "loss": 0.9291, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 2.868794326241135, | |
| "grad_norm": 0.2820391476002473, | |
| "learning_rate": 1.2279390474306596e-07, | |
| "loss": 0.9227, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 2.872340425531915, | |
| "grad_norm": 0.5592736002471681, | |
| "learning_rate": 1.1642851065131632e-07, | |
| "loss": 0.9244, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.875886524822695, | |
| "grad_norm": 0.30536487775765697, | |
| "learning_rate": 1.1023155597401191e-07, | |
| "loss": 0.918, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.879432624113475, | |
| "grad_norm": 0.3029280843642643, | |
| "learning_rate": 1.042031463219062e-07, | |
| "loss": 0.9302, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.882978723404255, | |
| "grad_norm": 0.2994340315241542, | |
| "learning_rate": 9.8343384433347e-08, | |
| "loss": 0.9309, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.8865248226950353, | |
| "grad_norm": 0.3031555751284906, | |
| "learning_rate": 9.265237017252127e-08, | |
| "loss": 0.9233, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.8900709219858154, | |
| "grad_norm": 0.2859515624839857, | |
| "learning_rate": 8.7130200527763e-08, | |
| "loss": 0.9329, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.8936170212765955, | |
| "grad_norm": 0.30049527098612866, | |
| "learning_rate": 8.177696960989134e-08, | |
| "loss": 0.9497, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.897163120567376, | |
| "grad_norm": 0.29111500301529464, | |
| "learning_rate": 7.659276865061405e-08, | |
| "loss": 0.9346, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.900709219858156, | |
| "grad_norm": 0.31603907957179006, | |
| "learning_rate": 7.157768600096315e-08, | |
| "loss": 0.8679, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.904255319148936, | |
| "grad_norm": 0.2990219556138474, | |
| "learning_rate": 6.673180712979955e-08, | |
| "loss": 0.916, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.9078014184397163, | |
| "grad_norm": 0.30153042995412016, | |
| "learning_rate": 6.205521462235186e-08, | |
| "loss": 0.8636, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.9113475177304964, | |
| "grad_norm": 0.3029791908615506, | |
| "learning_rate": 5.754798817880547e-08, | |
| "loss": 0.8801, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.9148936170212765, | |
| "grad_norm": 0.2984269406531723, | |
| "learning_rate": 5.321020461295345e-08, | |
| "loss": 0.8671, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.918439716312057, | |
| "grad_norm": 0.2904036664928134, | |
| "learning_rate": 4.904193785087552e-08, | |
| "loss": 0.9076, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.921985815602837, | |
| "grad_norm": 0.30300705182935816, | |
| "learning_rate": 4.504325892968897e-08, | |
| "loss": 0.9292, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.925531914893617, | |
| "grad_norm": 0.29507244978624, | |
| "learning_rate": 4.1214235996331894e-08, | |
| "loss": 0.9023, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.9290780141843973, | |
| "grad_norm": 0.293245569012375, | |
| "learning_rate": 3.755493430640411e-08, | |
| "loss": 0.9244, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.9326241134751774, | |
| "grad_norm": 0.3166445042435038, | |
| "learning_rate": 3.406541622305248e-08, | |
| "loss": 0.8939, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.9361702127659575, | |
| "grad_norm": 0.28444259392761967, | |
| "learning_rate": 3.0745741215912897e-08, | |
| "loss": 0.9311, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.9397163120567376, | |
| "grad_norm": 0.29915613230977617, | |
| "learning_rate": 2.7595965860089947e-08, | |
| "loss": 0.8967, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.9432624113475176, | |
| "grad_norm": 0.28446692658506895, | |
| "learning_rate": 2.4616143835202166e-08, | |
| "loss": 0.8871, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.9468085106382977, | |
| "grad_norm": 0.2911373845781085, | |
| "learning_rate": 2.1806325924457196e-08, | |
| "loss": 0.8811, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.950354609929078, | |
| "grad_norm": 0.3061109679698943, | |
| "learning_rate": 1.9166560013791358e-08, | |
| "loss": 0.953, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.953900709219858, | |
| "grad_norm": 0.29911989312390747, | |
| "learning_rate": 1.669689109105588e-08, | |
| "loss": 0.9677, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.9574468085106385, | |
| "grad_norm": 0.2820359915616927, | |
| "learning_rate": 1.4397361245243046e-08, | |
| "loss": 0.9245, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.9609929078014185, | |
| "grad_norm": 0.29965639425928353, | |
| "learning_rate": 1.2268009665776793e-08, | |
| "loss": 0.9243, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.9645390070921986, | |
| "grad_norm": 0.29357391717111103, | |
| "learning_rate": 1.0308872641839885e-08, | |
| "loss": 0.9284, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.9680851063829787, | |
| "grad_norm": 0.29141026412523086, | |
| "learning_rate": 8.51998356175776e-09, | |
| "loss": 0.9194, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.971631205673759, | |
| "grad_norm": 0.29560515229715406, | |
| "learning_rate": 6.901372912426762e-09, | |
| "loss": 0.8976, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.975177304964539, | |
| "grad_norm": 0.29014624788605387, | |
| "learning_rate": 5.453068278796769e-09, | |
| "loss": 0.9236, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.978723404255319, | |
| "grad_norm": 0.3015143386594201, | |
| "learning_rate": 4.175094343402686e-09, | |
| "loss": 0.8851, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.9822695035460995, | |
| "grad_norm": 0.28915706574444056, | |
| "learning_rate": 3.0674728859392267e-09, | |
| "loss": 0.8792, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.9858156028368796, | |
| "grad_norm": 0.2934637584959333, | |
| "learning_rate": 2.130222782894542e-09, | |
| "loss": 0.919, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.9893617021276597, | |
| "grad_norm": 0.28832944418271755, | |
| "learning_rate": 1.3633600072249232e-09, | |
| "loss": 0.9196, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.99290780141844, | |
| "grad_norm": 0.3010464732427794, | |
| "learning_rate": 7.668976280839069e-10, | |
| "loss": 0.9249, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.99645390070922, | |
| "grad_norm": 0.2903066150490175, | |
| "learning_rate": 3.408458105991219e-10, | |
| "loss": 0.9185, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.2891940920591566, | |
| "learning_rate": 8.521181570242398e-11, | |
| "loss": 0.8954, | |
| "step": 846 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 846, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1043657926901760.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
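
The state above is the standard Hugging Face `Trainer` checkpoint state (`trainer_state.json`): `log_history` holds one record per optimizer step (here `logging_steps` is 1), and training stops at `max_steps` 846 after `num_train_epochs` 3. Below is a minimal sketch for inspecting such a file, assuming the JSON has been saved locally as `trainer_state.json` (the filename and location are assumptions, not part of the log itself); it plots the logged loss and learning rate against the global step.

```python
# Minimal sketch: parse a Trainer state file and plot loss / learning rate.
# Assumes the JSON above is saved as "trainer_state.json" in the working
# directory -- that path is an assumption for illustration.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only records that carry a training loss; eval records (if any) lack
# the "loss" key. In this particular run every record is a training step.
train_log = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_log]
losses = [e["loss"] for e in train_log]
lrs = [e["learning_rate"] for e in train_log]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()
```

If the `transformers` package is available, the same file can instead be reloaded with `transformers.TrainerState.load_from_json("trainer_state.json")`, which returns a `TrainerState` dataclass rather than a raw dict; the plain-`json` route above is used only to keep the sketch dependency-light.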