|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 1065, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0028169014084507044, |
|
"grad_norm": 6.843351489795488, |
|
"learning_rate": 9.345794392523364e-08, |
|
"loss": 0.9385, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.005633802816901409, |
|
"grad_norm": 6.607567814573529, |
|
"learning_rate": 1.8691588785046729e-07, |
|
"loss": 0.9283, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.008450704225352112, |
|
"grad_norm": 6.902238922410287, |
|
"learning_rate": 2.8037383177570096e-07, |
|
"loss": 0.9641, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.011267605633802818, |
|
"grad_norm": 6.946697059316272, |
|
"learning_rate": 3.7383177570093457e-07, |
|
"loss": 0.9717, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.014084507042253521, |
|
"grad_norm": 7.003582107757224, |
|
"learning_rate": 4.6728971962616824e-07, |
|
"loss": 0.9639, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.016901408450704224, |
|
"grad_norm": 6.9376047256330144, |
|
"learning_rate": 5.607476635514019e-07, |
|
"loss": 0.9582, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01971830985915493, |
|
"grad_norm": 6.558029428164414, |
|
"learning_rate": 6.542056074766355e-07, |
|
"loss": 0.9168, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.022535211267605635, |
|
"grad_norm": 6.345482529471037, |
|
"learning_rate": 7.476635514018691e-07, |
|
"loss": 0.9253, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02535211267605634, |
|
"grad_norm": 6.6326272471163, |
|
"learning_rate": 8.411214953271029e-07, |
|
"loss": 0.9375, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.028169014084507043, |
|
"grad_norm": 6.464087540605822, |
|
"learning_rate": 9.345794392523365e-07, |
|
"loss": 0.9562, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.030985915492957747, |
|
"grad_norm": 5.166432873083264, |
|
"learning_rate": 1.0280373831775702e-06, |
|
"loss": 0.8792, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03380281690140845, |
|
"grad_norm": 5.079046652537091, |
|
"learning_rate": 1.1214953271028038e-06, |
|
"loss": 0.8914, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.036619718309859155, |
|
"grad_norm": 5.037215339408657, |
|
"learning_rate": 1.2149532710280374e-06, |
|
"loss": 0.9172, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03943661971830986, |
|
"grad_norm": 4.6952926229942165, |
|
"learning_rate": 1.308411214953271e-06, |
|
"loss": 0.887, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04225352112676056, |
|
"grad_norm": 2.7137316275253864, |
|
"learning_rate": 1.4018691588785047e-06, |
|
"loss": 0.823, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04507042253521127, |
|
"grad_norm": 2.6632918255705675, |
|
"learning_rate": 1.4953271028037383e-06, |
|
"loss": 0.8242, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04788732394366197, |
|
"grad_norm": 2.451657467696323, |
|
"learning_rate": 1.588785046728972e-06, |
|
"loss": 0.8375, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05070422535211268, |
|
"grad_norm": 2.2343565320461978, |
|
"learning_rate": 1.6822429906542057e-06, |
|
"loss": 0.8349, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05352112676056338, |
|
"grad_norm": 2.1451406757683613, |
|
"learning_rate": 1.7757009345794394e-06, |
|
"loss": 0.8274, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.056338028169014086, |
|
"grad_norm": 2.5866576397571164, |
|
"learning_rate": 1.869158878504673e-06, |
|
"loss": 0.8195, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.059154929577464786, |
|
"grad_norm": 3.6068541588513856, |
|
"learning_rate": 1.962616822429907e-06, |
|
"loss": 0.8085, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.061971830985915494, |
|
"grad_norm": 3.7301124014564735, |
|
"learning_rate": 2.0560747663551404e-06, |
|
"loss": 0.8007, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0647887323943662, |
|
"grad_norm": 3.879846862236254, |
|
"learning_rate": 2.149532710280374e-06, |
|
"loss": 0.8258, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0676056338028169, |
|
"grad_norm": 3.257187025314909, |
|
"learning_rate": 2.2429906542056077e-06, |
|
"loss": 0.7603, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07042253521126761, |
|
"grad_norm": 3.0150439634525137, |
|
"learning_rate": 2.3364485981308413e-06, |
|
"loss": 0.7567, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07323943661971831, |
|
"grad_norm": 2.731962426951936, |
|
"learning_rate": 2.429906542056075e-06, |
|
"loss": 0.7809, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07605633802816901, |
|
"grad_norm": 2.0851170351121966, |
|
"learning_rate": 2.5233644859813085e-06, |
|
"loss": 0.7279, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07887323943661972, |
|
"grad_norm": 1.5871667506827638, |
|
"learning_rate": 2.616822429906542e-06, |
|
"loss": 0.7732, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08169014084507042, |
|
"grad_norm": 1.3057594440862028, |
|
"learning_rate": 2.7102803738317757e-06, |
|
"loss": 0.6913, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08450704225352113, |
|
"grad_norm": 1.3261878434703152, |
|
"learning_rate": 2.8037383177570094e-06, |
|
"loss": 0.7487, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08732394366197183, |
|
"grad_norm": 1.3262343134044554, |
|
"learning_rate": 2.897196261682243e-06, |
|
"loss": 0.7357, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.09014084507042254, |
|
"grad_norm": 1.23592234207026, |
|
"learning_rate": 2.9906542056074766e-06, |
|
"loss": 0.718, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09295774647887324, |
|
"grad_norm": 1.1342511290180883, |
|
"learning_rate": 3.08411214953271e-06, |
|
"loss": 0.6962, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.09577464788732394, |
|
"grad_norm": 1.0467874998875826, |
|
"learning_rate": 3.177570093457944e-06, |
|
"loss": 0.6934, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.09859154929577464, |
|
"grad_norm": 1.0265524735100418, |
|
"learning_rate": 3.2710280373831774e-06, |
|
"loss": 0.6908, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10140845070422536, |
|
"grad_norm": 0.7759183188592782, |
|
"learning_rate": 3.3644859813084115e-06, |
|
"loss": 0.6902, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.10422535211267606, |
|
"grad_norm": 0.7307747183179154, |
|
"learning_rate": 3.457943925233645e-06, |
|
"loss": 0.6436, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.10704225352112676, |
|
"grad_norm": 0.9476449074596629, |
|
"learning_rate": 3.5514018691588787e-06, |
|
"loss": 0.6674, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.10985915492957747, |
|
"grad_norm": 1.023136741079733, |
|
"learning_rate": 3.6448598130841123e-06, |
|
"loss": 0.6525, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.11267605633802817, |
|
"grad_norm": 0.9756562165836902, |
|
"learning_rate": 3.738317757009346e-06, |
|
"loss": 0.6604, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11549295774647887, |
|
"grad_norm": 0.6835958250022881, |
|
"learning_rate": 3.8317757009345796e-06, |
|
"loss": 0.6718, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.11830985915492957, |
|
"grad_norm": 0.6560085321954412, |
|
"learning_rate": 3.925233644859814e-06, |
|
"loss": 0.6408, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.12112676056338029, |
|
"grad_norm": 0.8372121993022718, |
|
"learning_rate": 4.018691588785047e-06, |
|
"loss": 0.6352, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.12394366197183099, |
|
"grad_norm": 0.9399706591180332, |
|
"learning_rate": 4.112149532710281e-06, |
|
"loss": 0.6882, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1267605633802817, |
|
"grad_norm": 0.6897032499753717, |
|
"learning_rate": 4.205607476635514e-06, |
|
"loss": 0.6635, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1295774647887324, |
|
"grad_norm": 0.5666941816418474, |
|
"learning_rate": 4.299065420560748e-06, |
|
"loss": 0.6223, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.1323943661971831, |
|
"grad_norm": 0.6466666231083906, |
|
"learning_rate": 4.392523364485981e-06, |
|
"loss": 0.6485, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.1352112676056338, |
|
"grad_norm": 0.6307749815943701, |
|
"learning_rate": 4.485981308411215e-06, |
|
"loss": 0.6631, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.13802816901408452, |
|
"grad_norm": 0.6101350712843577, |
|
"learning_rate": 4.579439252336449e-06, |
|
"loss": 0.6319, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.14084507042253522, |
|
"grad_norm": 0.5822616465924019, |
|
"learning_rate": 4.6728971962616825e-06, |
|
"loss": 0.6536, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.14366197183098592, |
|
"grad_norm": 0.5720766262956746, |
|
"learning_rate": 4.766355140186917e-06, |
|
"loss": 0.6441, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.14647887323943662, |
|
"grad_norm": 0.5821005508097582, |
|
"learning_rate": 4.85981308411215e-06, |
|
"loss": 0.6445, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.14929577464788732, |
|
"grad_norm": 0.7326245657069738, |
|
"learning_rate": 4.953271028037384e-06, |
|
"loss": 0.6532, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.15211267605633802, |
|
"grad_norm": 0.5055300266839129, |
|
"learning_rate": 5.046728971962617e-06, |
|
"loss": 0.6489, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.15492957746478872, |
|
"grad_norm": 0.5340536053639043, |
|
"learning_rate": 5.14018691588785e-06, |
|
"loss": 0.6414, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.15774647887323945, |
|
"grad_norm": 0.5829492821591393, |
|
"learning_rate": 5.233644859813084e-06, |
|
"loss": 0.6152, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.16056338028169015, |
|
"grad_norm": 0.5453794801583319, |
|
"learning_rate": 5.3271028037383174e-06, |
|
"loss": 0.6039, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.16338028169014085, |
|
"grad_norm": 0.5899665130491681, |
|
"learning_rate": 5.4205607476635515e-06, |
|
"loss": 0.6185, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.16619718309859155, |
|
"grad_norm": 0.5611513671408634, |
|
"learning_rate": 5.514018691588785e-06, |
|
"loss": 0.6148, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.16901408450704225, |
|
"grad_norm": 0.5104752150744826, |
|
"learning_rate": 5.607476635514019e-06, |
|
"loss": 0.6207, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.17183098591549295, |
|
"grad_norm": 0.5222500246055776, |
|
"learning_rate": 5.700934579439253e-06, |
|
"loss": 0.6183, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.17464788732394365, |
|
"grad_norm": 0.7031567337432721, |
|
"learning_rate": 5.794392523364486e-06, |
|
"loss": 0.6158, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.17746478873239438, |
|
"grad_norm": 0.5670544399349872, |
|
"learning_rate": 5.88785046728972e-06, |
|
"loss": 0.6164, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.18028169014084508, |
|
"grad_norm": 0.5835195221742685, |
|
"learning_rate": 5.981308411214953e-06, |
|
"loss": 0.6143, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.18309859154929578, |
|
"grad_norm": 0.486716857038764, |
|
"learning_rate": 6.074766355140187e-06, |
|
"loss": 0.6218, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.18591549295774648, |
|
"grad_norm": 0.6518972141316477, |
|
"learning_rate": 6.16822429906542e-06, |
|
"loss": 0.6158, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.18873239436619718, |
|
"grad_norm": 0.5773322831933766, |
|
"learning_rate": 6.2616822429906544e-06, |
|
"loss": 0.6202, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.19154929577464788, |
|
"grad_norm": 0.5912349009905348, |
|
"learning_rate": 6.355140186915888e-06, |
|
"loss": 0.6219, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.19436619718309858, |
|
"grad_norm": 0.6060790549903095, |
|
"learning_rate": 6.448598130841122e-06, |
|
"loss": 0.5999, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.19718309859154928, |
|
"grad_norm": 0.6216646819276722, |
|
"learning_rate": 6.542056074766355e-06, |
|
"loss": 0.6077, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.47329562993465146, |
|
"learning_rate": 6.635514018691589e-06, |
|
"loss": 0.6051, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2028169014084507, |
|
"grad_norm": 0.534157980811324, |
|
"learning_rate": 6.728971962616823e-06, |
|
"loss": 0.6089, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.2056338028169014, |
|
"grad_norm": 0.49293802647714885, |
|
"learning_rate": 6.822429906542056e-06, |
|
"loss": 0.5944, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.2084507042253521, |
|
"grad_norm": 0.5105802467170905, |
|
"learning_rate": 6.91588785046729e-06, |
|
"loss": 0.6192, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.2112676056338028, |
|
"grad_norm": 0.4545810635734364, |
|
"learning_rate": 7.009345794392523e-06, |
|
"loss": 0.5849, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2140845070422535, |
|
"grad_norm": 0.5132015985707264, |
|
"learning_rate": 7.1028037383177574e-06, |
|
"loss": 0.5757, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.21690140845070421, |
|
"grad_norm": 0.5240278152519751, |
|
"learning_rate": 7.196261682242991e-06, |
|
"loss": 0.593, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.21971830985915494, |
|
"grad_norm": 0.47719974239889557, |
|
"learning_rate": 7.289719626168225e-06, |
|
"loss": 0.578, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.22253521126760564, |
|
"grad_norm": 0.5022510515134598, |
|
"learning_rate": 7.383177570093458e-06, |
|
"loss": 0.5789, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.22535211267605634, |
|
"grad_norm": 0.5268449362714561, |
|
"learning_rate": 7.476635514018692e-06, |
|
"loss": 0.6014, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.22816901408450704, |
|
"grad_norm": 0.50075545195209, |
|
"learning_rate": 7.570093457943926e-06, |
|
"loss": 0.5838, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.23098591549295774, |
|
"grad_norm": 0.5519102966922635, |
|
"learning_rate": 7.663551401869159e-06, |
|
"loss": 0.5787, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.23380281690140844, |
|
"grad_norm": 0.5305728995724017, |
|
"learning_rate": 7.757009345794392e-06, |
|
"loss": 0.5654, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.23661971830985915, |
|
"grad_norm": 0.4984850288710446, |
|
"learning_rate": 7.850467289719627e-06, |
|
"loss": 0.5998, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.23943661971830985, |
|
"grad_norm": 0.5870272453921658, |
|
"learning_rate": 7.94392523364486e-06, |
|
"loss": 0.6066, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.24225352112676057, |
|
"grad_norm": 0.5113265334277904, |
|
"learning_rate": 8.037383177570094e-06, |
|
"loss": 0.6123, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.24507042253521127, |
|
"grad_norm": 0.490831722513275, |
|
"learning_rate": 8.130841121495327e-06, |
|
"loss": 0.5984, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.24788732394366197, |
|
"grad_norm": 0.5558569258369573, |
|
"learning_rate": 8.224299065420562e-06, |
|
"loss": 0.6069, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.2507042253521127, |
|
"grad_norm": 0.49435562466880034, |
|
"learning_rate": 8.317757009345795e-06, |
|
"loss": 0.6304, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.2535211267605634, |
|
"grad_norm": 0.49423876533813854, |
|
"learning_rate": 8.411214953271028e-06, |
|
"loss": 0.6034, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.2563380281690141, |
|
"grad_norm": 0.5511212763406935, |
|
"learning_rate": 8.504672897196263e-06, |
|
"loss": 0.614, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2591549295774648, |
|
"grad_norm": 0.4543803002498734, |
|
"learning_rate": 8.598130841121496e-06, |
|
"loss": 0.5713, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.2619718309859155, |
|
"grad_norm": 0.5706161712680489, |
|
"learning_rate": 8.69158878504673e-06, |
|
"loss": 0.6034, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.2647887323943662, |
|
"grad_norm": 0.505724802570353, |
|
"learning_rate": 8.785046728971963e-06, |
|
"loss": 0.5643, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.2676056338028169, |
|
"grad_norm": 0.517868035081914, |
|
"learning_rate": 8.878504672897197e-06, |
|
"loss": 0.598, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2704225352112676, |
|
"grad_norm": 0.5476188621020196, |
|
"learning_rate": 8.97196261682243e-06, |
|
"loss": 0.5783, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.27323943661971833, |
|
"grad_norm": 0.536506093724152, |
|
"learning_rate": 9.065420560747664e-06, |
|
"loss": 0.585, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.27605633802816903, |
|
"grad_norm": 0.5052391248701771, |
|
"learning_rate": 9.158878504672899e-06, |
|
"loss": 0.5856, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.27887323943661974, |
|
"grad_norm": 0.5507911743646905, |
|
"learning_rate": 9.252336448598132e-06, |
|
"loss": 0.5742, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.28169014084507044, |
|
"grad_norm": 0.5667507509388369, |
|
"learning_rate": 9.345794392523365e-06, |
|
"loss": 0.5804, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.28450704225352114, |
|
"grad_norm": 0.5367761197956139, |
|
"learning_rate": 9.439252336448598e-06, |
|
"loss": 0.6108, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.28732394366197184, |
|
"grad_norm": 0.6151261182976093, |
|
"learning_rate": 9.532710280373833e-06, |
|
"loss": 0.5803, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.29014084507042254, |
|
"grad_norm": 0.5630295655109795, |
|
"learning_rate": 9.626168224299066e-06, |
|
"loss": 0.569, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.29295774647887324, |
|
"grad_norm": 0.595128847891053, |
|
"learning_rate": 9.7196261682243e-06, |
|
"loss": 0.5949, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.29577464788732394, |
|
"grad_norm": 0.5932250260888466, |
|
"learning_rate": 9.813084112149533e-06, |
|
"loss": 0.5912, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.29859154929577464, |
|
"grad_norm": 0.554296183304856, |
|
"learning_rate": 9.906542056074768e-06, |
|
"loss": 0.5858, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.30140845070422534, |
|
"grad_norm": 0.5542586088977368, |
|
"learning_rate": 1e-05, |
|
"loss": 0.5947, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.30422535211267604, |
|
"grad_norm": 0.5539014174596538, |
|
"learning_rate": 9.999973115104874e-06, |
|
"loss": 0.564, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.30704225352112674, |
|
"grad_norm": 0.5885183535059, |
|
"learning_rate": 9.999892460708615e-06, |
|
"loss": 0.6013, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.30985915492957744, |
|
"grad_norm": 0.47510239201375715, |
|
"learning_rate": 9.999758037678576e-06, |
|
"loss": 0.6016, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3126760563380282, |
|
"grad_norm": 0.5191594501253673, |
|
"learning_rate": 9.999569847460335e-06, |
|
"loss": 0.5849, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3154929577464789, |
|
"grad_norm": 0.5208229000776549, |
|
"learning_rate": 9.999327892077683e-06, |
|
"loss": 0.5767, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3183098591549296, |
|
"grad_norm": 0.5371441518969067, |
|
"learning_rate": 9.9990321741326e-06, |
|
"loss": 0.5763, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3211267605633803, |
|
"grad_norm": 0.5928018630279552, |
|
"learning_rate": 9.998682696805224e-06, |
|
"loss": 0.5594, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.323943661971831, |
|
"grad_norm": 0.5256757847980414, |
|
"learning_rate": 9.998279463853819e-06, |
|
"loss": 0.5829, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.3267605633802817, |
|
"grad_norm": 0.6374633387905916, |
|
"learning_rate": 9.997822479614733e-06, |
|
"loss": 0.5829, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.3295774647887324, |
|
"grad_norm": 0.5099003102667371, |
|
"learning_rate": 9.997311749002358e-06, |
|
"loss": 0.5796, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.3323943661971831, |
|
"grad_norm": 0.6634412406674159, |
|
"learning_rate": 9.996747277509068e-06, |
|
"loss": 0.5569, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.3352112676056338, |
|
"grad_norm": 0.6007705739995757, |
|
"learning_rate": 9.996129071205167e-06, |
|
"loss": 0.5747, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3380281690140845, |
|
"grad_norm": 0.5996405607790697, |
|
"learning_rate": 9.99545713673882e-06, |
|
"loss": 0.5637, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3408450704225352, |
|
"grad_norm": 0.5597246678529963, |
|
"learning_rate": 9.994731481335979e-06, |
|
"loss": 0.582, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3436619718309859, |
|
"grad_norm": 0.5507818897049944, |
|
"learning_rate": 9.993952112800314e-06, |
|
"loss": 0.5796, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.3464788732394366, |
|
"grad_norm": 0.5350927421515935, |
|
"learning_rate": 9.993119039513124e-06, |
|
"loss": 0.5715, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3492957746478873, |
|
"grad_norm": 0.5239546056575984, |
|
"learning_rate": 9.992232270433239e-06, |
|
"loss": 0.5771, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.352112676056338, |
|
"grad_norm": 0.5126764082447791, |
|
"learning_rate": 9.991291815096941e-06, |
|
"loss": 0.559, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.35492957746478876, |
|
"grad_norm": 0.4969718032918457, |
|
"learning_rate": 9.990297683617844e-06, |
|
"loss": 0.5678, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.35774647887323946, |
|
"grad_norm": 0.5483487768076548, |
|
"learning_rate": 9.989249886686798e-06, |
|
"loss": 0.5579, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.36056338028169016, |
|
"grad_norm": 0.4979281248814945, |
|
"learning_rate": 9.988148435571766e-06, |
|
"loss": 0.527, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.36338028169014086, |
|
"grad_norm": 0.5642480799900738, |
|
"learning_rate": 9.98699334211771e-06, |
|
"loss": 0.5692, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.36619718309859156, |
|
"grad_norm": 0.6026792858515811, |
|
"learning_rate": 9.985784618746455e-06, |
|
"loss": 0.5801, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.36901408450704226, |
|
"grad_norm": 0.5167698331604939, |
|
"learning_rate": 9.984522278456558e-06, |
|
"loss": 0.5539, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.37183098591549296, |
|
"grad_norm": 0.5817335775473784, |
|
"learning_rate": 9.983206334823179e-06, |
|
"loss": 0.5606, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.37464788732394366, |
|
"grad_norm": 0.5276842927516352, |
|
"learning_rate": 9.981836801997916e-06, |
|
"loss": 0.5722, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.37746478873239436, |
|
"grad_norm": 0.5912359974067817, |
|
"learning_rate": 9.98041369470867e-06, |
|
"loss": 0.5614, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.38028169014084506, |
|
"grad_norm": 0.5452723193931854, |
|
"learning_rate": 9.978937028259477e-06, |
|
"loss": 0.5515, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.38309859154929576, |
|
"grad_norm": 0.5325105100665509, |
|
"learning_rate": 9.977406818530346e-06, |
|
"loss": 0.5466, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.38591549295774646, |
|
"grad_norm": 0.5647180917474068, |
|
"learning_rate": 9.975823081977089e-06, |
|
"loss": 0.5624, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.38873239436619716, |
|
"grad_norm": 0.5723755319658607, |
|
"learning_rate": 9.974185835631141e-06, |
|
"loss": 0.5626, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.39154929577464787, |
|
"grad_norm": 0.5372783352103804, |
|
"learning_rate": 9.972495097099379e-06, |
|
"loss": 0.543, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.39436619718309857, |
|
"grad_norm": 0.5397037680212823, |
|
"learning_rate": 9.970750884563938e-06, |
|
"loss": 0.5648, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3971830985915493, |
|
"grad_norm": 0.5552469428542458, |
|
"learning_rate": 9.968953216782005e-06, |
|
"loss": 0.5765, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.5577079940502304, |
|
"learning_rate": 9.96710211308562e-06, |
|
"loss": 0.5582, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.4028169014084507, |
|
"grad_norm": 0.5705003650495926, |
|
"learning_rate": 9.965197593381483e-06, |
|
"loss": 0.5666, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.4056338028169014, |
|
"grad_norm": 0.4444536833248146, |
|
"learning_rate": 9.963239678150712e-06, |
|
"loss": 0.5609, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4084507042253521, |
|
"grad_norm": 0.6060222914963574, |
|
"learning_rate": 9.961228388448648e-06, |
|
"loss": 0.5603, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4112676056338028, |
|
"grad_norm": 0.49513326472609975, |
|
"learning_rate": 9.959163745904613e-06, |
|
"loss": 0.547, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.4140845070422535, |
|
"grad_norm": 0.4629289638803288, |
|
"learning_rate": 9.95704577272169e-06, |
|
"loss": 0.5935, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.4169014084507042, |
|
"grad_norm": 0.5136703956308253, |
|
"learning_rate": 9.954874491676474e-06, |
|
"loss": 0.5784, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.4197183098591549, |
|
"grad_norm": 0.5669768244151758, |
|
"learning_rate": 9.952649926118827e-06, |
|
"loss": 0.5788, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.4225352112676056, |
|
"grad_norm": 0.4669028299546674, |
|
"learning_rate": 9.950372099971635e-06, |
|
"loss": 0.5484, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.4253521126760563, |
|
"grad_norm": 0.5798768078016783, |
|
"learning_rate": 9.948041037730546e-06, |
|
"loss": 0.5658, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.428169014084507, |
|
"grad_norm": 0.6319591077591975, |
|
"learning_rate": 9.945656764463706e-06, |
|
"loss": 0.5681, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.4309859154929577, |
|
"grad_norm": 0.5566876220468727, |
|
"learning_rate": 9.943219305811486e-06, |
|
"loss": 0.586, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.43380281690140843, |
|
"grad_norm": 0.6780781695543063, |
|
"learning_rate": 9.940728687986219e-06, |
|
"loss": 0.5678, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.43661971830985913, |
|
"grad_norm": 0.5376203276294301, |
|
"learning_rate": 9.938184937771903e-06, |
|
"loss": 0.5576, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.4394366197183099, |
|
"grad_norm": 0.589490601515025, |
|
"learning_rate": 9.935588082523917e-06, |
|
"loss": 0.5668, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.4422535211267606, |
|
"grad_norm": 0.5240178495748034, |
|
"learning_rate": 9.93293815016874e-06, |
|
"loss": 0.5819, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.4450704225352113, |
|
"grad_norm": 0.581734341309261, |
|
"learning_rate": 9.930235169203628e-06, |
|
"loss": 0.5341, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.447887323943662, |
|
"grad_norm": 0.4764372205838014, |
|
"learning_rate": 9.927479168696327e-06, |
|
"loss": 0.5507, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.4507042253521127, |
|
"grad_norm": 0.5983293375404063, |
|
"learning_rate": 9.924670178284751e-06, |
|
"loss": 0.6017, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4535211267605634, |
|
"grad_norm": 0.4755074939422403, |
|
"learning_rate": 9.921808228176667e-06, |
|
"loss": 0.572, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4563380281690141, |
|
"grad_norm": 0.549427169617322, |
|
"learning_rate": 9.918893349149361e-06, |
|
"loss": 0.5648, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4591549295774648, |
|
"grad_norm": 0.5473994981650376, |
|
"learning_rate": 9.915925572549325e-06, |
|
"loss": 0.5795, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.4619718309859155, |
|
"grad_norm": 0.5344229046079484, |
|
"learning_rate": 9.912904930291902e-06, |
|
"loss": 0.5702, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.4647887323943662, |
|
"grad_norm": 0.5248831527184084, |
|
"learning_rate": 9.909831454860953e-06, |
|
"loss": 0.544, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.4676056338028169, |
|
"grad_norm": 0.5007058816288649, |
|
"learning_rate": 9.906705179308503e-06, |
|
"loss": 0.575, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.4704225352112676, |
|
"grad_norm": 0.6076422198886006, |
|
"learning_rate": 9.903526137254386e-06, |
|
"loss": 0.5637, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.4732394366197183, |
|
"grad_norm": 0.44954861939381563, |
|
"learning_rate": 9.900294362885892e-06, |
|
"loss": 0.5465, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.476056338028169, |
|
"grad_norm": 0.6136256906325439, |
|
"learning_rate": 9.897009890957382e-06, |
|
"loss": 0.5411, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.4788732394366197, |
|
"grad_norm": 0.49306626666710585, |
|
"learning_rate": 9.893672756789933e-06, |
|
"loss": 0.5695, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.48169014084507045, |
|
"grad_norm": 0.5638207147752434, |
|
"learning_rate": 9.890282996270944e-06, |
|
"loss": 0.562, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.48450704225352115, |
|
"grad_norm": 0.5844363211055491, |
|
"learning_rate": 9.886840645853757e-06, |
|
"loss": 0.5713, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.48732394366197185, |
|
"grad_norm": 0.4989681859492393, |
|
"learning_rate": 9.883345742557265e-06, |
|
"loss": 0.5184, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.49014084507042255, |
|
"grad_norm": 0.6798466666249082, |
|
"learning_rate": 9.879798323965512e-06, |
|
"loss": 0.5482, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.49295774647887325, |
|
"grad_norm": 0.5286425298758181, |
|
"learning_rate": 9.876198428227288e-06, |
|
"loss": 0.5738, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.49577464788732395, |
|
"grad_norm": 0.5070463361119862, |
|
"learning_rate": 9.872546094055719e-06, |
|
"loss": 0.559, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.49859154929577465, |
|
"grad_norm": 0.5974495055987761, |
|
"learning_rate": 9.868841360727856e-06, |
|
"loss": 0.5596, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5014084507042254, |
|
"grad_norm": 0.5018737926842496, |
|
"learning_rate": 9.865084268084247e-06, |
|
"loss": 0.5409, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.504225352112676, |
|
"grad_norm": 0.657241380827331, |
|
"learning_rate": 9.861274856528504e-06, |
|
"loss": 0.5534, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5070422535211268, |
|
"grad_norm": 0.5001455789690383, |
|
"learning_rate": 9.857413167026881e-06, |
|
"loss": 0.563, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5098591549295775, |
|
"grad_norm": 0.6635297083955395, |
|
"learning_rate": 9.853499241107827e-06, |
|
"loss": 0.5501, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5126760563380282, |
|
"grad_norm": 0.513999422537837, |
|
"learning_rate": 9.849533120861537e-06, |
|
"loss": 0.5573, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5154929577464789, |
|
"grad_norm": 0.5481718260759659, |
|
"learning_rate": 9.845514848939499e-06, |
|
"loss": 0.5439, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5183098591549296, |
|
"grad_norm": 0.6738586169727009, |
|
"learning_rate": 9.841444468554043e-06, |
|
"loss": 0.5747, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5211267605633803, |
|
"grad_norm": 0.5984162328532678, |
|
"learning_rate": 9.837322023477866e-06, |
|
"loss": 0.5762, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.523943661971831, |
|
"grad_norm": 0.5506392735862736, |
|
"learning_rate": 9.833147558043574e-06, |
|
"loss": 0.5295, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.5267605633802817, |
|
"grad_norm": 0.6775281123264153, |
|
"learning_rate": 9.82892111714319e-06, |
|
"loss": 0.5666, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.5295774647887324, |
|
"grad_norm": 0.5471145431878144, |
|
"learning_rate": 9.824642746227682e-06, |
|
"loss": 0.566, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.532394366197183, |
|
"grad_norm": 0.7100217523678872, |
|
"learning_rate": 9.820312491306471e-06, |
|
"loss": 0.5637, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5352112676056338, |
|
"grad_norm": 0.6057794786859599, |
|
"learning_rate": 9.815930398946942e-06, |
|
"loss": 0.5507, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5380281690140845, |
|
"grad_norm": 0.626141535245459, |
|
"learning_rate": 9.811496516273925e-06, |
|
"loss": 0.5716, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5408450704225352, |
|
"grad_norm": 0.5527022890637331, |
|
"learning_rate": 9.807010890969214e-06, |
|
"loss": 0.5532, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.543661971830986, |
|
"grad_norm": 0.6020033027406588, |
|
"learning_rate": 9.80247357127103e-06, |
|
"loss": 0.5889, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5464788732394367, |
|
"grad_norm": 0.5199016296325637, |
|
"learning_rate": 9.797884605973523e-06, |
|
"loss": 0.5807, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5492957746478874, |
|
"grad_norm": 0.4523520545534342, |
|
"learning_rate": 9.793244044426233e-06, |
|
"loss": 0.5514, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5521126760563381, |
|
"grad_norm": 0.5379175425838836, |
|
"learning_rate": 9.788551936533561e-06, |
|
"loss": 0.5444, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5549295774647888, |
|
"grad_norm": 0.48732646620471026, |
|
"learning_rate": 9.783808332754242e-06, |
|
"loss": 0.5315, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.5577464788732395, |
|
"grad_norm": 0.5557329022733009, |
|
"learning_rate": 9.779013284100791e-06, |
|
"loss": 0.5501, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.5605633802816902, |
|
"grad_norm": 0.5373986677295377, |
|
"learning_rate": 9.77416684213896e-06, |
|
"loss": 0.5719, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.5633802816901409, |
|
"grad_norm": 0.5919610806064748, |
|
"learning_rate": 9.769269058987181e-06, |
|
"loss": 0.5202, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5661971830985916, |
|
"grad_norm": 0.51134408429211, |
|
"learning_rate": 9.76431998731601e-06, |
|
"loss": 0.5474, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5690140845070423, |
|
"grad_norm": 0.5283277616415307, |
|
"learning_rate": 9.759319680347556e-06, |
|
"loss": 0.5476, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.571830985915493, |
|
"grad_norm": 0.5803790463856993, |
|
"learning_rate": 9.75426819185491e-06, |
|
"loss": 0.5542, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.5746478873239437, |
|
"grad_norm": 0.5817083296083267, |
|
"learning_rate": 9.74916557616157e-06, |
|
"loss": 0.5365, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.5774647887323944, |
|
"grad_norm": 0.4771789889476685, |
|
"learning_rate": 9.744011888140847e-06, |
|
"loss": 0.5335, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.5802816901408451, |
|
"grad_norm": 0.59366389105907, |
|
"learning_rate": 9.738807183215288e-06, |
|
"loss": 0.5372, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.5830985915492958, |
|
"grad_norm": 0.5577403514252908, |
|
"learning_rate": 9.73355151735607e-06, |
|
"loss": 0.5362, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.5859154929577465, |
|
"grad_norm": 0.47605232856572827, |
|
"learning_rate": 9.728244947082405e-06, |
|
"loss": 0.5572, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.5887323943661972, |
|
"grad_norm": 0.7070690743472441, |
|
"learning_rate": 9.722887529460928e-06, |
|
"loss": 0.548, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.5915492957746479, |
|
"grad_norm": 0.5624040595146416, |
|
"learning_rate": 9.717479322105079e-06, |
|
"loss": 0.5444, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.5943661971830986, |
|
"grad_norm": 0.5859424494783367, |
|
"learning_rate": 9.712020383174496e-06, |
|
"loss": 0.5249, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.5971830985915493, |
|
"grad_norm": 0.6678507938439178, |
|
"learning_rate": 9.70651077137438e-06, |
|
"loss": 0.533, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5328470708977834, |
|
"learning_rate": 9.700950545954867e-06, |
|
"loss": 0.5833, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6028169014084507, |
|
"grad_norm": 0.5362441571989329, |
|
"learning_rate": 9.695339766710382e-06, |
|
"loss": 0.5396, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6056338028169014, |
|
"grad_norm": 0.6434345591744017, |
|
"learning_rate": 9.689678493979011e-06, |
|
"loss": 0.5487, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6084507042253521, |
|
"grad_norm": 0.5167603706269648, |
|
"learning_rate": 9.683966788641848e-06, |
|
"loss": 0.5481, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.6112676056338028, |
|
"grad_norm": 0.4686781372062126, |
|
"learning_rate": 9.678204712122328e-06, |
|
"loss": 0.5419, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6140845070422535, |
|
"grad_norm": 0.5760752748606316, |
|
"learning_rate": 9.672392326385582e-06, |
|
"loss": 0.557, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6169014084507042, |
|
"grad_norm": 0.6026666041248488, |
|
"learning_rate": 9.666529693937763e-06, |
|
"loss": 0.5529, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6197183098591549, |
|
"grad_norm": 0.5451736184177465, |
|
"learning_rate": 9.660616877825374e-06, |
|
"loss": 0.5361, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6225352112676056, |
|
"grad_norm": 0.5116565555731337, |
|
"learning_rate": 9.65465394163459e-06, |
|
"loss": 0.5087, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.6253521126760564, |
|
"grad_norm": 0.6065841927093307, |
|
"learning_rate": 9.64864094949058e-06, |
|
"loss": 0.5465, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6281690140845071, |
|
"grad_norm": 0.5589673127927728, |
|
"learning_rate": 9.642577966056806e-06, |
|
"loss": 0.5148, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6309859154929578, |
|
"grad_norm": 0.5585400710757196, |
|
"learning_rate": 9.636465056534337e-06, |
|
"loss": 0.5267, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6338028169014085, |
|
"grad_norm": 0.574813185908209, |
|
"learning_rate": 9.630302286661149e-06, |
|
"loss": 0.5357, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6366197183098592, |
|
"grad_norm": 0.5191579985554721, |
|
"learning_rate": 9.624089722711409e-06, |
|
"loss": 0.5426, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6394366197183099, |
|
"grad_norm": 0.6688929345165525, |
|
"learning_rate": 9.617827431494769e-06, |
|
"loss": 0.5524, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6422535211267606, |
|
"grad_norm": 0.46805258630886243, |
|
"learning_rate": 9.611515480355644e-06, |
|
"loss": 0.5499, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6450704225352113, |
|
"grad_norm": 0.6144905582687046, |
|
"learning_rate": 9.605153937172495e-06, |
|
"loss": 0.5632, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.647887323943662, |
|
"grad_norm": 0.52683034829998, |
|
"learning_rate": 9.598742870357089e-06, |
|
"loss": 0.5555, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6507042253521127, |
|
"grad_norm": 0.603840703034481, |
|
"learning_rate": 9.592282348853772e-06, |
|
"loss": 0.5448, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.6535211267605634, |
|
"grad_norm": 0.5800521717117073, |
|
"learning_rate": 9.585772442138717e-06, |
|
"loss": 0.5451, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.6563380281690141, |
|
"grad_norm": 0.5500966334452324, |
|
"learning_rate": 9.57921322021919e-06, |
|
"loss": 0.5268, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.6591549295774648, |
|
"grad_norm": 0.5570376537579396, |
|
"learning_rate": 9.572604753632789e-06, |
|
"loss": 0.5255, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.6619718309859155, |
|
"grad_norm": 0.47661383694647175, |
|
"learning_rate": 9.565947113446685e-06, |
|
"loss": 0.5591, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.6647887323943662, |
|
"grad_norm": 0.5445391735613945, |
|
"learning_rate": 9.559240371256861e-06, |
|
"loss": 0.5448, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.6676056338028169, |
|
"grad_norm": 0.5194324455486823, |
|
"learning_rate": 9.552484599187344e-06, |
|
"loss": 0.5645, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.6704225352112676, |
|
"grad_norm": 0.49906141175236074, |
|
"learning_rate": 9.545679869889422e-06, |
|
"loss": 0.5356, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.6732394366197183, |
|
"grad_norm": 0.44801193098795, |
|
"learning_rate": 9.538826256540866e-06, |
|
"loss": 0.5525, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.676056338028169, |
|
"grad_norm": 0.4950063463823408, |
|
"learning_rate": 9.53192383284515e-06, |
|
"loss": 0.5251, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.6788732394366197, |
|
"grad_norm": 0.4655702750763721, |
|
"learning_rate": 9.524972673030646e-06, |
|
"loss": 0.5532, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.6816901408450704, |
|
"grad_norm": 0.5015107720315597, |
|
"learning_rate": 9.517972851849837e-06, |
|
"loss": 0.5363, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.6845070422535211, |
|
"grad_norm": 0.4874041712602739, |
|
"learning_rate": 9.510924444578505e-06, |
|
"loss": 0.5138, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.6873239436619718, |
|
"grad_norm": 0.5318082437227483, |
|
"learning_rate": 9.503827527014927e-06, |
|
"loss": 0.5568, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.6901408450704225, |
|
"grad_norm": 0.5365279955696896, |
|
"learning_rate": 9.496682175479058e-06, |
|
"loss": 0.5568, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.6929577464788732, |
|
"grad_norm": 0.4650608267010065, |
|
"learning_rate": 9.489488466811706e-06, |
|
"loss": 0.5294, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.6957746478873239, |
|
"grad_norm": 0.502326692197303, |
|
"learning_rate": 9.482246478373713e-06, |
|
"loss": 0.5622, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.6985915492957746, |
|
"grad_norm": 0.5139235141970571, |
|
"learning_rate": 9.47495628804512e-06, |
|
"loss": 0.5309, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7014084507042253, |
|
"grad_norm": 0.49082713927721994, |
|
"learning_rate": 9.467617974224326e-06, |
|
"loss": 0.5612, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.704225352112676, |
|
"grad_norm": 0.4368824557932469, |
|
"learning_rate": 9.460231615827254e-06, |
|
"loss": 0.544, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7070422535211267, |
|
"grad_norm": 0.4590552453029466, |
|
"learning_rate": 9.452797292286486e-06, |
|
"loss": 0.5403, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.7098591549295775, |
|
"grad_norm": 0.4679280855458366, |
|
"learning_rate": 9.445315083550432e-06, |
|
"loss": 0.5213, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.7126760563380282, |
|
"grad_norm": 0.45716682819250093, |
|
"learning_rate": 9.437785070082448e-06, |
|
"loss": 0.5486, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.7154929577464789, |
|
"grad_norm": 0.5570183710120886, |
|
"learning_rate": 9.430207332859981e-06, |
|
"loss": 0.5501, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7183098591549296, |
|
"grad_norm": 0.4245724220850406, |
|
"learning_rate": 9.422581953373704e-06, |
|
"loss": 0.5356, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7211267605633803, |
|
"grad_norm": 0.48956473422005187, |
|
"learning_rate": 9.414909013626624e-06, |
|
"loss": 0.5313, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.723943661971831, |
|
"grad_norm": 0.4582929162313386, |
|
"learning_rate": 9.407188596133212e-06, |
|
"loss": 0.5463, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7267605633802817, |
|
"grad_norm": 0.5115654013081172, |
|
"learning_rate": 9.399420783918518e-06, |
|
"loss": 0.5535, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7295774647887324, |
|
"grad_norm": 0.5269407638629522, |
|
"learning_rate": 9.391605660517268e-06, |
|
"loss": 0.5597, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7323943661971831, |
|
"grad_norm": 0.5438385236535639, |
|
"learning_rate": 9.38374330997297e-06, |
|
"loss": 0.5388, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.7352112676056338, |
|
"grad_norm": 0.43926987601895895, |
|
"learning_rate": 9.375833816837012e-06, |
|
"loss": 0.5412, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7380281690140845, |
|
"grad_norm": 0.5836211324705879, |
|
"learning_rate": 9.367877266167752e-06, |
|
"loss": 0.5688, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.7408450704225352, |
|
"grad_norm": 0.5045079775368958, |
|
"learning_rate": 9.3598737435296e-06, |
|
"loss": 0.5347, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.7436619718309859, |
|
"grad_norm": 0.44338592065262517, |
|
"learning_rate": 9.351823334992105e-06, |
|
"loss": 0.5168, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7464788732394366, |
|
"grad_norm": 0.5235306342556597, |
|
"learning_rate": 9.343726127129023e-06, |
|
"loss": 0.5604, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.7492957746478873, |
|
"grad_norm": 0.5172376233558552, |
|
"learning_rate": 9.335582207017385e-06, |
|
"loss": 0.534, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.752112676056338, |
|
"grad_norm": 0.5180203870423422, |
|
"learning_rate": 9.327391662236569e-06, |
|
"loss": 0.5379, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.7549295774647887, |
|
"grad_norm": 0.4804092941634994, |
|
"learning_rate": 9.31915458086735e-06, |
|
"loss": 0.5052, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.7577464788732394, |
|
"grad_norm": 0.46979320817265685, |
|
"learning_rate": 9.310871051490953e-06, |
|
"loss": 0.5295, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.7605633802816901, |
|
"grad_norm": 0.6685486725224626, |
|
"learning_rate": 9.302541163188107e-06, |
|
"loss": 0.5358, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.7633802816901408, |
|
"grad_norm": 0.4220043036826099, |
|
"learning_rate": 9.294165005538083e-06, |
|
"loss": 0.5597, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.7661971830985915, |
|
"grad_norm": 0.5240690708886699, |
|
"learning_rate": 9.285742668617725e-06, |
|
"loss": 0.5429, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.7690140845070422, |
|
"grad_norm": 0.5225378668342543, |
|
"learning_rate": 9.277274243000495e-06, |
|
"loss": 0.5314, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.7718309859154929, |
|
"grad_norm": 0.5089658349598899, |
|
"learning_rate": 9.268759819755484e-06, |
|
"loss": 0.5404, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.7746478873239436, |
|
"grad_norm": 0.5081863591984968, |
|
"learning_rate": 9.260199490446444e-06, |
|
"loss": 0.5436, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.7774647887323943, |
|
"grad_norm": 0.5291957024091261, |
|
"learning_rate": 9.251593347130796e-06, |
|
"loss": 0.5459, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.780281690140845, |
|
"grad_norm": 0.5546316222295684, |
|
"learning_rate": 9.242941482358646e-06, |
|
"loss": 0.5638, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.7830985915492957, |
|
"grad_norm": 0.493107854299303, |
|
"learning_rate": 9.234243989171784e-06, |
|
"loss": 0.5163, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.7859154929577464, |
|
"grad_norm": 0.504054739961812, |
|
"learning_rate": 9.225500961102685e-06, |
|
"loss": 0.5403, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.7887323943661971, |
|
"grad_norm": 0.46644848784294396, |
|
"learning_rate": 9.216712492173509e-06, |
|
"loss": 0.5319, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.7915492957746478, |
|
"grad_norm": 0.48875850819222605, |
|
"learning_rate": 9.20787867689508e-06, |
|
"loss": 0.55, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.7943661971830986, |
|
"grad_norm": 0.45374233893805643, |
|
"learning_rate": 9.19899961026588e-06, |
|
"loss": 0.5381, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.7971830985915493, |
|
"grad_norm": 0.49081212722421264, |
|
"learning_rate": 9.190075387771014e-06, |
|
"loss": 0.496, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.4898635707497967, |
|
"learning_rate": 9.181106105381201e-06, |
|
"loss": 0.5469, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.8028169014084507, |
|
"grad_norm": 0.5742626781139881, |
|
"learning_rate": 9.172091859551727e-06, |
|
"loss": 0.5504, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8056338028169014, |
|
"grad_norm": 0.48424294060741285, |
|
"learning_rate": 9.16303274722141e-06, |
|
"loss": 0.5447, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.8084507042253521, |
|
"grad_norm": 0.41774263725909944, |
|
"learning_rate": 9.153928865811567e-06, |
|
"loss": 0.5065, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8112676056338028, |
|
"grad_norm": 0.5366510861069486, |
|
"learning_rate": 9.144780313224955e-06, |
|
"loss": 0.5274, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8140845070422535, |
|
"grad_norm": 0.46697982171080715, |
|
"learning_rate": 9.135587187844727e-06, |
|
"loss": 0.5023, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.8169014084507042, |
|
"grad_norm": 0.48443064321010365, |
|
"learning_rate": 9.126349588533367e-06, |
|
"loss": 0.5373, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.819718309859155, |
|
"grad_norm": 0.5135389257533832, |
|
"learning_rate": 9.117067614631629e-06, |
|
"loss": 0.5501, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8225352112676056, |
|
"grad_norm": 0.4983544795249396, |
|
"learning_rate": 9.107741365957473e-06, |
|
"loss": 0.52, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8253521126760563, |
|
"grad_norm": 0.474169348982468, |
|
"learning_rate": 9.098370942804984e-06, |
|
"loss": 0.5522, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.828169014084507, |
|
"grad_norm": 0.553807859933943, |
|
"learning_rate": 9.0889564459433e-06, |
|
"loss": 0.546, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8309859154929577, |
|
"grad_norm": 0.521288457346182, |
|
"learning_rate": 9.079497976615527e-06, |
|
"loss": 0.5039, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8338028169014085, |
|
"grad_norm": 0.42015807275115125, |
|
"learning_rate": 9.069995636537646e-06, |
|
"loss": 0.5077, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.8366197183098592, |
|
"grad_norm": 0.5583730579511402, |
|
"learning_rate": 9.060449527897424e-06, |
|
"loss": 0.5588, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.8394366197183099, |
|
"grad_norm": 0.5542124651392272, |
|
"learning_rate": 9.050859753353312e-06, |
|
"loss": 0.5217, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.8422535211267606, |
|
"grad_norm": 0.48602364869365977, |
|
"learning_rate": 9.041226416033344e-06, |
|
"loss": 0.54, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.8450704225352113, |
|
"grad_norm": 0.653714795345656, |
|
"learning_rate": 9.031549619534025e-06, |
|
"loss": 0.5497, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.847887323943662, |
|
"grad_norm": 0.4800359394630375, |
|
"learning_rate": 9.021829467919218e-06, |
|
"loss": 0.5445, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.8507042253521127, |
|
"grad_norm": 0.48729987333610464, |
|
"learning_rate": 9.012066065719026e-06, |
|
"loss": 0.5473, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.8535211267605634, |
|
"grad_norm": 0.6013760409344134, |
|
"learning_rate": 9.00225951792867e-06, |
|
"loss": 0.5399, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.856338028169014, |
|
"grad_norm": 0.4955229451079562, |
|
"learning_rate": 8.99240993000735e-06, |
|
"loss": 0.5373, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.8591549295774648, |
|
"grad_norm": 0.5057254756870588, |
|
"learning_rate": 8.98251740787712e-06, |
|
"loss": 0.5308, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.8619718309859155, |
|
"grad_norm": 0.5062350967048924, |
|
"learning_rate": 8.972582057921753e-06, |
|
"loss": 0.5379, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.8647887323943662, |
|
"grad_norm": 0.5149008679939511, |
|
"learning_rate": 8.962603986985582e-06, |
|
"loss": 0.5447, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.8676056338028169, |
|
"grad_norm": 0.5138417832440646, |
|
"learning_rate": 8.952583302372361e-06, |
|
"loss": 0.541, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.8704225352112676, |
|
"grad_norm": 0.4988758497493651, |
|
"learning_rate": 8.942520111844117e-06, |
|
"loss": 0.5456, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.8732394366197183, |
|
"grad_norm": 0.4908248371860937, |
|
"learning_rate": 8.932414523619979e-06, |
|
"loss": 0.5417, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.8760563380281691, |
|
"grad_norm": 0.47467152147405195, |
|
"learning_rate": 8.922266646375012e-06, |
|
"loss": 0.5369, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.8788732394366198, |
|
"grad_norm": 0.5674377586617675, |
|
"learning_rate": 8.912076589239071e-06, |
|
"loss": 0.5408, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.8816901408450705, |
|
"grad_norm": 0.5006532529649368, |
|
"learning_rate": 8.901844461795597e-06, |
|
"loss": 0.5355, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.8845070422535212, |
|
"grad_norm": 0.44995769221377135, |
|
"learning_rate": 8.89157037408046e-06, |
|
"loss": 0.5471, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.8873239436619719, |
|
"grad_norm": 0.5082076772096817, |
|
"learning_rate": 8.881254436580771e-06, |
|
"loss": 0.5141, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.8901408450704226, |
|
"grad_norm": 0.4979524931446426, |
|
"learning_rate": 8.870896760233687e-06, |
|
"loss": 0.5183, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.8929577464788733, |
|
"grad_norm": 0.5033006044158947, |
|
"learning_rate": 8.860497456425226e-06, |
|
"loss": 0.539, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.895774647887324, |
|
"grad_norm": 0.5558963290415088, |
|
"learning_rate": 8.850056636989064e-06, |
|
"loss": 0.5214, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.8985915492957747, |
|
"grad_norm": 0.528178628352138, |
|
"learning_rate": 8.839574414205335e-06, |
|
"loss": 0.5308, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.9014084507042254, |
|
"grad_norm": 0.5988043719853473, |
|
"learning_rate": 8.829050900799426e-06, |
|
"loss": 0.5584, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9042253521126761, |
|
"grad_norm": 0.6178729360079042, |
|
"learning_rate": 8.818486209940754e-06, |
|
"loss": 0.5439, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9070422535211268, |
|
"grad_norm": 0.4727007397282943, |
|
"learning_rate": 8.807880455241562e-06, |
|
"loss": 0.5208, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9098591549295775, |
|
"grad_norm": 0.5865056586686292, |
|
"learning_rate": 8.797233750755696e-06, |
|
"loss": 0.5293, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9126760563380282, |
|
"grad_norm": 0.527614643499507, |
|
"learning_rate": 8.786546210977364e-06, |
|
"loss": 0.51, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.9154929577464789, |
|
"grad_norm": 0.5253730216506938, |
|
"learning_rate": 8.775817950839925e-06, |
|
"loss": 0.5464, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.9183098591549296, |
|
"grad_norm": 0.5120572114997892, |
|
"learning_rate": 8.765049085714634e-06, |
|
"loss": 0.522, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.9211267605633803, |
|
"grad_norm": 0.5579538329061854, |
|
"learning_rate": 8.754239731409418e-06, |
|
"loss": 0.515, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.923943661971831, |
|
"grad_norm": 0.4695513820418862, |
|
"learning_rate": 8.743390004167618e-06, |
|
"loss": 0.528, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.9267605633802817, |
|
"grad_norm": 0.4744094042874529, |
|
"learning_rate": 8.732500020666748e-06, |
|
"loss": 0.5032, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9295774647887324, |
|
"grad_norm": 0.48310233510167816, |
|
"learning_rate": 8.72156989801723e-06, |
|
"loss": 0.515, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.9323943661971831, |
|
"grad_norm": 0.5362590571822845, |
|
"learning_rate": 8.710599753761148e-06, |
|
"loss": 0.5412, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.9352112676056338, |
|
"grad_norm": 0.43044803610258253, |
|
"learning_rate": 8.699589705870972e-06, |
|
"loss": 0.5252, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.9380281690140845, |
|
"grad_norm": 0.5529255675552455, |
|
"learning_rate": 8.688539872748293e-06, |
|
"loss": 0.554, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.9408450704225352, |
|
"grad_norm": 0.4697670392983979, |
|
"learning_rate": 8.677450373222555e-06, |
|
"loss": 0.5249, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.9436619718309859, |
|
"grad_norm": 0.466397415688367, |
|
"learning_rate": 8.666321326549771e-06, |
|
"loss": 0.5291, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.9464788732394366, |
|
"grad_norm": 0.47431096436255926, |
|
"learning_rate": 8.655152852411242e-06, |
|
"loss": 0.5262, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.9492957746478873, |
|
"grad_norm": 0.42290856961673545, |
|
"learning_rate": 8.643945070912269e-06, |
|
"loss": 0.5437, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.952112676056338, |
|
"grad_norm": 0.5067980558253683, |
|
"learning_rate": 8.632698102580866e-06, |
|
"loss": 0.5487, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.9549295774647887, |
|
"grad_norm": 0.49756235790217707, |
|
"learning_rate": 8.621412068366455e-06, |
|
"loss": 0.5411, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.9577464788732394, |
|
"grad_norm": 0.5339747238457127, |
|
"learning_rate": 8.61008708963858e-06, |
|
"loss": 0.5336, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.9605633802816902, |
|
"grad_norm": 0.5258263211335326, |
|
"learning_rate": 8.598723288185582e-06, |
|
"loss": 0.5576, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.9633802816901409, |
|
"grad_norm": 0.4474885925823666, |
|
"learning_rate": 8.587320786213308e-06, |
|
"loss": 0.5469, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.9661971830985916, |
|
"grad_norm": 0.4582164725544087, |
|
"learning_rate": 8.575879706343786e-06, |
|
"loss": 0.5258, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.9690140845070423, |
|
"grad_norm": 0.5353459619126049, |
|
"learning_rate": 8.564400171613907e-06, |
|
"loss": 0.5393, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.971830985915493, |
|
"grad_norm": 0.5551427263378275, |
|
"learning_rate": 8.552882305474106e-06, |
|
"loss": 0.511, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.9746478873239437, |
|
"grad_norm": 0.46371192783441034, |
|
"learning_rate": 8.541326231787036e-06, |
|
"loss": 0.5562, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.9774647887323944, |
|
"grad_norm": 0.49791553086908497, |
|
"learning_rate": 8.529732074826225e-06, |
|
"loss": 0.5317, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.9802816901408451, |
|
"grad_norm": 0.48902813408872026, |
|
"learning_rate": 8.518099959274751e-06, |
|
"loss": 0.5368, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.9830985915492958, |
|
"grad_norm": 0.5196746962677518, |
|
"learning_rate": 8.506430010223899e-06, |
|
"loss": 0.5328, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.9859154929577465, |
|
"grad_norm": 0.512561887896862, |
|
"learning_rate": 8.494722353171807e-06, |
|
"loss": 0.5631, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.9887323943661972, |
|
"grad_norm": 0.4688614656001333, |
|
"learning_rate": 8.482977114022133e-06, |
|
"loss": 0.5215, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.9915492957746479, |
|
"grad_norm": 0.4815835702147859, |
|
"learning_rate": 8.471194419082683e-06, |
|
"loss": 0.5175, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.9943661971830986, |
|
"grad_norm": 0.494451086453971, |
|
"learning_rate": 8.459374395064066e-06, |
|
"loss": 0.5347, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.9971830985915493, |
|
"grad_norm": 0.5479082871227343, |
|
"learning_rate": 8.447517169078322e-06, |
|
"loss": 0.5342, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.48128318573777745, |
|
"learning_rate": 8.435622868637562e-06, |
|
"loss": 0.5185, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0028169014084507, |
|
"grad_norm": 0.606217938961181, |
|
"learning_rate": 8.423691621652597e-06, |
|
"loss": 0.5392, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.0056338028169014, |
|
"grad_norm": 0.5552799943960874, |
|
"learning_rate": 8.411723556431555e-06, |
|
"loss": 0.4923, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.008450704225352, |
|
"grad_norm": 0.4737481782588327, |
|
"learning_rate": 8.399718801678507e-06, |
|
"loss": 0.5031, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0112676056338028, |
|
"grad_norm": 0.48881408780555036, |
|
"learning_rate": 8.38767748649208e-06, |
|
"loss": 0.4966, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0140845070422535, |
|
"grad_norm": 0.6036514656317372, |
|
"learning_rate": 8.375599740364076e-06, |
|
"loss": 0.5054, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.0169014084507042, |
|
"grad_norm": 0.49847239674819194, |
|
"learning_rate": 8.363485693178068e-06, |
|
"loss": 0.4762, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.019718309859155, |
|
"grad_norm": 0.48893728544811266, |
|
"learning_rate": 8.351335475208013e-06, |
|
"loss": 0.4988, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0225352112676056, |
|
"grad_norm": 0.5275361671421374, |
|
"learning_rate": 8.339149217116844e-06, |
|
"loss": 0.5181, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.0253521126760563, |
|
"grad_norm": 0.5699229385448366, |
|
"learning_rate": 8.32692704995507e-06, |
|
"loss": 0.4857, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.028169014084507, |
|
"grad_norm": 0.5300003505651588, |
|
"learning_rate": 8.314669105159363e-06, |
|
"loss": 0.5019, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.0309859154929577, |
|
"grad_norm": 0.47743877846606136, |
|
"learning_rate": 8.302375514551147e-06, |
|
"loss": 0.5031, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.0338028169014084, |
|
"grad_norm": 0.46610287314823595, |
|
"learning_rate": 8.29004641033518e-06, |
|
"loss": 0.484, |
|
"step": 367 |
|
}, |
|
{
"epoch": 1.036619718309859,
"grad_norm": 0.5070608182387246,
"learning_rate": 8.277681925098133e-06,
"loss": 0.4919,
"step": 368
},
{
"epoch": 1.0394366197183098,
"grad_norm": 0.5155487269874641,
"learning_rate": 8.26528219180716e-06,
"loss": 0.4964,
"step": 369
},
{
"epoch": 1.0422535211267605,
"grad_norm": 0.48047795719589964,
"learning_rate": 8.252847343808473e-06,
"loss": 0.51,
"step": 370
},
{
"epoch": 1.0450704225352112,
"grad_norm": 0.5029419422977898,
"learning_rate": 8.240377514825906e-06,
"loss": 0.5096,
"step": 371
},
{
"epoch": 1.047887323943662,
"grad_norm": 0.44300572811738526,
"learning_rate": 8.227872838959478e-06,
"loss": 0.5085,
"step": 372
},
{
"epoch": 1.0507042253521126,
"grad_norm": 0.46826085995665145,
"learning_rate": 8.215333450683945e-06,
"loss": 0.4927,
"step": 373
},
{
"epoch": 1.0535211267605633,
"grad_norm": 0.4992785109969445,
"learning_rate": 8.202759484847366e-06,
"loss": 0.5237,
"step": 374
},
{
"epoch": 1.056338028169014,
"grad_norm": 0.4583536646462298,
"learning_rate": 8.19015107666964e-06,
"loss": 0.4778,
"step": 375
},
{
"epoch": 1.0591549295774647,
"grad_norm": 0.4629467013193756,
"learning_rate": 8.177508361741063e-06,
"loss": 0.5039,
"step": 376
},
{
"epoch": 1.0619718309859154,
"grad_norm": 0.513616475500221,
"learning_rate": 8.164831476020856e-06,
"loss": 0.4902,
"step": 377
},
{
"epoch": 1.064788732394366,
"grad_norm": 0.4564209134623684,
"learning_rate": 8.15212055583572e-06,
"loss": 0.4807,
"step": 378
},
{
"epoch": 1.0676056338028168,
"grad_norm": 0.5148849912441339,
"learning_rate": 8.139375737878356e-06,
"loss": 0.519,
"step": 379
},
{
"epoch": 1.0704225352112675,
"grad_norm": 0.48962484473993256,
"learning_rate": 8.126597159206002e-06,
"loss": 0.4926,
"step": 380
},
{
"epoch": 1.0732394366197182,
"grad_norm": 0.558804039498056,
"learning_rate": 8.113784957238957e-06,
"loss": 0.4954,
"step": 381
},
{
"epoch": 1.076056338028169,
"grad_norm": 0.5328343330334975,
"learning_rate": 8.100939269759103e-06,
"loss": 0.5051,
"step": 382
},
{
"epoch": 1.0788732394366196,
"grad_norm": 0.5293202978458649,
"learning_rate": 8.088060234908425e-06,
"loss": 0.5173,
"step": 383
},
{
"epoch": 1.0816901408450703,
"grad_norm": 0.4837225369393582,
"learning_rate": 8.075147991187521e-06,
"loss": 0.4951,
"step": 384
},
{
"epoch": 1.084507042253521,
"grad_norm": 0.5444025643060015,
"learning_rate": 8.062202677454123e-06,
"loss": 0.4944,
"step": 385
},
{
"epoch": 1.0873239436619717,
"grad_norm": 0.5058337046267952,
"learning_rate": 8.04922443292159e-06,
"loss": 0.4799,
"step": 386
},
{
"epoch": 1.0901408450704226,
"grad_norm": 0.4803075581095113,
"learning_rate": 8.036213397157418e-06,
"loss": 0.4983,
"step": 387
},
{
"epoch": 1.0929577464788733,
"grad_norm": 0.5913188229351856,
"learning_rate": 8.02316971008174e-06,
"loss": 0.493,
"step": 388
},
{
"epoch": 1.095774647887324,
"grad_norm": 0.5178060100858961,
"learning_rate": 8.01009351196582e-06,
"loss": 0.4793,
"step": 389
},
{
"epoch": 1.0985915492957747,
"grad_norm": 0.45957843759530875,
"learning_rate": 7.996984943430544e-06,
"loss": 0.4802,
"step": 390
},
{
"epoch": 1.1014084507042254,
"grad_norm": 0.49107943563289536,
"learning_rate": 7.983844145444908e-06,
"loss": 0.4638,
"step": 391
},
{
"epoch": 1.1042253521126761,
"grad_norm": 0.5865391082035357,
"learning_rate": 7.970671259324502e-06,
"loss": 0.4934,
"step": 392
},
{
"epoch": 1.1070422535211268,
"grad_norm": 0.46382700522033943,
"learning_rate": 7.957466426729995e-06,
"loss": 0.507,
"step": 393
},
{
"epoch": 1.1098591549295775,
"grad_norm": 0.5235903123213329,
"learning_rate": 7.944229789665595e-06,
"loss": 0.5016,
"step": 394
},
{
"epoch": 1.1126760563380282,
"grad_norm": 0.531704325443622,
"learning_rate": 7.930961490477546e-06,
"loss": 0.5434,
"step": 395
},
{
"epoch": 1.115492957746479,
"grad_norm": 0.4854203383807877,
"learning_rate": 7.917661671852582e-06,
"loss": 0.4627,
"step": 396
},
{
"epoch": 1.1183098591549296,
"grad_norm": 0.5733239722826652,
"learning_rate": 7.904330476816391e-06,
"loss": 0.4888,
"step": 397
},
{
"epoch": 1.1211267605633803,
"grad_norm": 0.43773395249537966,
"learning_rate": 7.890968048732091e-06,
"loss": 0.4672,
"step": 398
},
{
"epoch": 1.123943661971831,
"grad_norm": 0.5302196621641707,
"learning_rate": 7.877574531298666e-06,
"loss": 0.505,
"step": 399
},
{
"epoch": 1.1267605633802817,
"grad_norm": 0.46253916299905623,
"learning_rate": 7.864150068549446e-06,
"loss": 0.497,
"step": 400
},
{
"epoch": 1.1295774647887324,
"grad_norm": 0.5434762640908769,
"learning_rate": 7.850694804850538e-06,
"loss": 0.514,
"step": 401
},
{
"epoch": 1.1323943661971831,
"grad_norm": 0.49589560162074636,
"learning_rate": 7.837208884899283e-06,
"loss": 0.4966,
"step": 402
},
{
"epoch": 1.1352112676056338,
"grad_norm": 0.4542407372958917,
"learning_rate": 7.823692453722701e-06,
"loss": 0.4867,
"step": 403
},
{
"epoch": 1.1380281690140845,
"grad_norm": 0.5461139604315557,
"learning_rate": 7.810145656675923e-06,
"loss": 0.5076,
"step": 404
},
{
"epoch": 1.1408450704225352,
"grad_norm": 0.5275707669715958,
"learning_rate": 7.796568639440635e-06,
"loss": 0.496,
"step": 405
},
{
"epoch": 1.143661971830986,
"grad_norm": 0.5140145559296189,
"learning_rate": 7.782961548023515e-06,
"loss": 0.4936,
"step": 406
},
{
"epoch": 1.1464788732394366,
"grad_norm": 0.5073549508475139,
"learning_rate": 7.769324528754653e-06,
"loss": 0.4817,
"step": 407
},
{
"epoch": 1.1492957746478873,
"grad_norm": 0.5143992829310441,
"learning_rate": 7.755657728285979e-06,
"loss": 0.5059,
"step": 408
},
{
"epoch": 1.152112676056338,
"grad_norm": 0.4274674431264774,
"learning_rate": 7.741961293589693e-06,
"loss": 0.5168,
"step": 409
},
{
"epoch": 1.1549295774647887,
"grad_norm": 0.5046190101945648,
"learning_rate": 7.728235371956678e-06,
"loss": 0.5144,
"step": 410
},
{
"epoch": 1.1577464788732394,
"grad_norm": 0.535811089052687,
"learning_rate": 7.714480110994922e-06,
"loss": 0.49,
"step": 411
},
{
"epoch": 1.1605633802816901,
"grad_norm": 0.4873836309283388,
"learning_rate": 7.700695658627924e-06,
"loss": 0.5049,
"step": 412
},
{
"epoch": 1.1633802816901408,
"grad_norm": 0.4727081045199869,
"learning_rate": 7.686882163093106e-06,
"loss": 0.4918,
"step": 413
},
{
"epoch": 1.1661971830985915,
"grad_norm": 0.4826333573880563,
"learning_rate": 7.673039772940218e-06,
"loss": 0.5199,
"step": 414
},
{
"epoch": 1.1690140845070423,
"grad_norm": 0.4529237270053036,
"learning_rate": 7.659168637029746e-06,
"loss": 0.4911,
"step": 415
},
{
"epoch": 1.171830985915493,
"grad_norm": 0.4703668307323843,
"learning_rate": 7.6452689045313e-06,
"loss": 0.4977,
"step": 416
},
{
"epoch": 1.1746478873239437,
"grad_norm": 0.43037435321183687,
"learning_rate": 7.631340724922023e-06,
"loss": 0.4884,
"step": 417
},
{
"epoch": 1.1774647887323944,
"grad_norm": 0.44390781109329,
"learning_rate": 7.617384247984973e-06,
"loss": 0.5007,
"step": 418
},
{
"epoch": 1.180281690140845,
"grad_norm": 0.4303155635436162,
"learning_rate": 7.603399623807519e-06,
"loss": 0.5152,
"step": 419
},
{
"epoch": 1.1830985915492958,
"grad_norm": 0.4472432608010433,
"learning_rate": 7.589387002779722e-06,
"loss": 0.4785,
"step": 420
},
{
"epoch": 1.1859154929577465,
"grad_norm": 0.4418018518887449,
"learning_rate": 7.575346535592721e-06,
"loss": 0.4925,
"step": 421
},
{
"epoch": 1.1887323943661972,
"grad_norm": 0.4358234713383654,
"learning_rate": 7.561278373237108e-06,
"loss": 0.4921,
"step": 422
},
{
"epoch": 1.1915492957746479,
"grad_norm": 0.4758680421804988,
"learning_rate": 7.547182667001316e-06,
"loss": 0.4841,
"step": 423
},
{
"epoch": 1.1943661971830986,
"grad_norm": 0.4632861244285043,
"learning_rate": 7.5330595684699735e-06,
"loss": 0.4705,
"step": 424
},
{
"epoch": 1.1971830985915493,
"grad_norm": 0.5122268438926686,
"learning_rate": 7.5189092295222945e-06,
"loss": 0.5051,
"step": 425
},
{
"epoch": 1.2,
"grad_norm": 0.4542095177777043,
"learning_rate": 7.504731802330427e-06,
"loss": 0.4856,
"step": 426
},
{
"epoch": 1.2028169014084507,
"grad_norm": 0.45093644422410417,
"learning_rate": 7.49052743935783e-06,
"loss": 0.5039,
"step": 427
},
{
"epoch": 1.2056338028169014,
"grad_norm": 0.4698670288362548,
"learning_rate": 7.476296293357626e-06,
"loss": 0.4796,
"step": 428
},
{
"epoch": 1.208450704225352,
"grad_norm": 0.45611072085962195,
"learning_rate": 7.462038517370962e-06,
"loss": 0.5061,
"step": 429
},
{
"epoch": 1.2112676056338028,
"grad_norm": 0.44879399555766897,
"learning_rate": 7.4477542647253645e-06,
"loss": 0.4949,
"step": 430
},
{
"epoch": 1.2140845070422535,
"grad_norm": 0.4568738470061427,
"learning_rate": 7.4334436890330845e-06,
"loss": 0.5085,
"step": 431
},
{
"epoch": 1.2169014084507042,
"grad_norm": 0.4910023364458397,
"learning_rate": 7.4191069441894555e-06,
"loss": 0.5022,
"step": 432
},
{
"epoch": 1.2197183098591549,
"grad_norm": 0.47245665007543136,
"learning_rate": 7.404744184371229e-06,
"loss": 0.4761,
"step": 433
},
{
"epoch": 1.2225352112676056,
"grad_norm": 0.4412540818413418,
"learning_rate": 7.39035556403492e-06,
"loss": 0.4868,
"step": 434
},
{
"epoch": 1.2253521126760563,
"grad_norm": 0.4332333517309346,
"learning_rate": 7.375941237915151e-06,
"loss": 0.4847,
"step": 435
},
{
"epoch": 1.228169014084507,
"grad_norm": 0.42462733830523613,
"learning_rate": 7.361501361022978e-06,
"loss": 0.4801,
"step": 436
},
{
"epoch": 1.2309859154929577,
"grad_norm": 0.41408929063500605,
"learning_rate": 7.347036088644232e-06,
"loss": 0.4633,
"step": 437
},
{
"epoch": 1.2338028169014084,
"grad_norm": 0.4446141488697224,
"learning_rate": 7.3325455763378465e-06,
"loss": 0.4874,
"step": 438
},
{
"epoch": 1.236619718309859,
"grad_norm": 0.39028343567183377,
"learning_rate": 7.318029979934181e-06,
"loss": 0.476,
"step": 439
},
{
"epoch": 1.2394366197183098,
"grad_norm": 0.40546374556808,
"learning_rate": 7.303489455533352e-06,
"loss": 0.4991,
"step": 440
},
{
"epoch": 1.2422535211267607,
"grad_norm": 0.4406508743990959,
"learning_rate": 7.288924159503549e-06,
"loss": 0.5082,
"step": 441
},
{
"epoch": 1.2450704225352114,
"grad_norm": 0.41221274812070907,
"learning_rate": 7.274334248479353e-06,
"loss": 0.4857,
"step": 442
},
{
"epoch": 1.247887323943662,
"grad_norm": 0.4400052134717122,
"learning_rate": 7.259719879360054e-06,
"loss": 0.4856,
"step": 443
},
{
"epoch": 1.2507042253521128,
"grad_norm": 0.4363861455466759,
"learning_rate": 7.2450812093079695e-06,
"loss": 0.4717,
"step": 444
},
{
"epoch": 1.2535211267605635,
"grad_norm": 0.40915950116360555,
"learning_rate": 7.2304183957467385e-06,
"loss": 0.4922,
"step": 445
},
{
"epoch": 1.2563380281690142,
"grad_norm": 0.42971830207213996,
"learning_rate": 7.215731596359645e-06,
"loss": 0.4968,
"step": 446
},
{
"epoch": 1.2591549295774649,
"grad_norm": 0.46604427981545354,
"learning_rate": 7.201020969087913e-06,
"loss": 0.5066,
"step": 447
},
{
"epoch": 1.2619718309859156,
"grad_norm": 0.39614011900749,
"learning_rate": 7.18628667212901e-06,
"loss": 0.4934,
"step": 448
},
{
"epoch": 1.2647887323943663,
"grad_norm": 0.506857483129988,
"learning_rate": 7.17152886393495e-06,
"loss": 0.4836,
"step": 449
},
{
"epoch": 1.267605633802817,
"grad_norm": 0.43280979811122466,
"learning_rate": 7.15674770321058e-06,
"loss": 0.4822,
"step": 450
},
{
"epoch": 1.2704225352112677,
"grad_norm": 0.4291718303065121,
"learning_rate": 7.141943348911885e-06,
"loss": 0.4801,
"step": 451
},
{
"epoch": 1.2732394366197184,
"grad_norm": 0.43238195279590513,
"learning_rate": 7.127115960244269e-06,
"loss": 0.4922,
"step": 452
},
{
"epoch": 1.276056338028169,
"grad_norm": 0.45348003112601215,
"learning_rate": 7.112265696660848e-06,
"loss": 0.4924,
"step": 453
},
{
"epoch": 1.2788732394366198,
"grad_norm": 0.42234280818079467,
"learning_rate": 7.0973927178607335e-06,
"loss": 0.4938,
"step": 454
},
{
"epoch": 1.2816901408450705,
"grad_norm": 0.4396695903135667,
"learning_rate": 7.0824971837873154e-06,
"loss": 0.5022,
"step": 455
},
{
"epoch": 1.2845070422535212,
"grad_norm": 0.433995835377117,
"learning_rate": 7.067579254626543e-06,
"loss": 0.4907,
"step": 456
},
{
"epoch": 1.287323943661972,
"grad_norm": 0.3865805137760006,
"learning_rate": 7.0526390908052e-06,
"loss": 0.4989,
"step": 457
},
{
"epoch": 1.2901408450704226,
"grad_norm": 0.4181083182911281,
"learning_rate": 7.037676852989182e-06,
"loss": 0.4764,
"step": 458
},
{
"epoch": 1.2929577464788733,
"grad_norm": 0.41081992730322214,
"learning_rate": 7.022692702081766e-06,
"loss": 0.4744,
"step": 459
},
{
"epoch": 1.295774647887324,
"grad_norm": 0.40712445897933586,
"learning_rate": 7.007686799221882e-06,
"loss": 0.5068,
"step": 460
},
{
"epoch": 1.2985915492957747,
"grad_norm": 0.4116920369510292,
"learning_rate": 6.992659305782381e-06,
"loss": 0.4652,
"step": 461
},
{
"epoch": 1.3014084507042254,
"grad_norm": 0.4574121168612027,
"learning_rate": 6.977610383368296e-06,
"loss": 0.5119,
"step": 462
},
{
"epoch": 1.304225352112676,
"grad_norm": 0.41865241421774196,
"learning_rate": 6.9625401938151085e-06,
"loss": 0.481,
"step": 463
},
{
"epoch": 1.3070422535211268,
"grad_norm": 0.42466950286656874,
"learning_rate": 6.947448899187004e-06,
"loss": 0.501,
"step": 464
},
{
"epoch": 1.3098591549295775,
"grad_norm": 0.438824118486777,
"learning_rate": 6.932336661775132e-06,
"loss": 0.4961,
"step": 465
},
{
"epoch": 1.3126760563380282,
"grad_norm": 0.4090033280279816,
"learning_rate": 6.91720364409586e-06,
"loss": 0.4741,
"step": 466
},
{
"epoch": 1.315492957746479,
"grad_norm": 0.411571188038661,
"learning_rate": 6.902050008889024e-06,
"loss": 0.4946,
"step": 467
},
{
"epoch": 1.3183098591549296,
"grad_norm": 0.40048050848825795,
"learning_rate": 6.886875919116184e-06,
"loss": 0.4598,
"step": 468
},
{
"epoch": 1.3211267605633803,
"grad_norm": 0.4355207784396914,
"learning_rate": 6.871681537958862e-06,
"loss": 0.5023,
"step": 469
},
{
"epoch": 1.323943661971831,
"grad_norm": 0.40362801280200744,
"learning_rate": 6.856467028816797e-06,
"loss": 0.4994,
"step": 470
},
{
"epoch": 1.3267605633802817,
"grad_norm": 0.46270660160714394,
"learning_rate": 6.841232555306181e-06,
"loss": 0.5157,
"step": 471
},
{
"epoch": 1.3295774647887324,
"grad_norm": 0.42249423161984545,
"learning_rate": 6.825978281257905e-06,
"loss": 0.4798,
"step": 472
},
{
"epoch": 1.332394366197183,
"grad_norm": 0.45487323092224735,
"learning_rate": 6.810704370715791e-06,
"loss": 0.4795,
"step": 473
},
{
"epoch": 1.3352112676056338,
"grad_norm": 0.3919330678327191,
"learning_rate": 6.795410987934834e-06,
"loss": 0.4898,
"step": 474
},
{
"epoch": 1.3380281690140845,
"grad_norm": 0.4353751489003626,
"learning_rate": 6.780098297379427e-06,
"loss": 0.4935,
"step": 475
},
{
"epoch": 1.3408450704225352,
"grad_norm": 0.4629533116217877,
"learning_rate": 6.764766463721605e-06,
"loss": 0.4862,
"step": 476
},
{
"epoch": 1.343661971830986,
"grad_norm": 0.4294849971678444,
"learning_rate": 6.7494156518392625e-06,
"loss": 0.4841,
"step": 477
},
{
"epoch": 1.3464788732394366,
"grad_norm": 0.4469982481591703,
"learning_rate": 6.734046026814388e-06,
"loss": 0.4761,
"step": 478
},
{
"epoch": 1.3492957746478873,
"grad_norm": 0.4431535514689644,
"learning_rate": 6.718657753931284e-06,
"loss": 0.4877,
"step": 479
},
{
"epoch": 1.352112676056338,
"grad_norm": 0.44477269709374434,
"learning_rate": 6.70325099867479e-06,
"loss": 0.492,
"step": 480
},
{
"epoch": 1.3549295774647887,
"grad_norm": 0.44065649629461523,
"learning_rate": 6.687825926728506e-06,
"loss": 0.4779,
"step": 481
},
{
"epoch": 1.3577464788732394,
"grad_norm": 0.46680428200178004,
"learning_rate": 6.672382703973011e-06,
"loss": 0.4979,
"step": 482
},
{
"epoch": 1.36056338028169,
"grad_norm": 0.3753392719693149,
"learning_rate": 6.65692149648407e-06,
"loss": 0.4957,
"step": 483
},
{
"epoch": 1.3633802816901408,
"grad_norm": 0.4464455137831182,
"learning_rate": 6.641442470530866e-06,
"loss": 0.5009,
"step": 484
},
{
"epoch": 1.3661971830985915,
"grad_norm": 0.4558973598726014,
"learning_rate": 6.62594579257419e-06,
"loss": 0.4862,
"step": 485
},
{
"epoch": 1.3690140845070422,
"grad_norm": 0.4244329051781463,
"learning_rate": 6.610431629264669e-06,
"loss": 0.4982,
"step": 486
},
{
"epoch": 1.371830985915493,
"grad_norm": 0.42900553114070356,
"learning_rate": 6.594900147440963e-06,
"loss": 0.5149,
"step": 487
},
{
"epoch": 1.3746478873239436,
"grad_norm": 0.46393543297393147,
"learning_rate": 6.579351514127976e-06,
"loss": 0.504,
"step": 488
},
{
"epoch": 1.3774647887323943,
"grad_norm": 0.5280930975377756,
"learning_rate": 6.56378589653506e-06,
"loss": 0.5027,
"step": 489
},
{
"epoch": 1.380281690140845,
"grad_norm": 0.44081231018896067,
"learning_rate": 6.548203462054211e-06,
"loss": 0.4974,
"step": 490
},
{
"epoch": 1.3830985915492957,
"grad_norm": 0.45229210902770534,
"learning_rate": 6.5326043782582785e-06,
"loss": 0.5099,
"step": 491
},
{
"epoch": 1.3859154929577464,
"grad_norm": 0.4116450533577545,
"learning_rate": 6.516988812899154e-06,
"loss": 0.4901,
"step": 492
},
{
"epoch": 1.388732394366197,
"grad_norm": 0.4941893352727475,
"learning_rate": 6.501356933905973e-06,
"loss": 0.4838,
"step": 493
},
{
"epoch": 1.3915492957746478,
"grad_norm": 0.4473632155182213,
"learning_rate": 6.485708909383306e-06,
"loss": 0.5047,
"step": 494
},
{
"epoch": 1.3943661971830985,
"grad_norm": 0.43832979886064743,
"learning_rate": 6.4700449076093515e-06,
"loss": 0.4723,
"step": 495
},
{
"epoch": 1.3971830985915492,
"grad_norm": 0.44987425974821665,
"learning_rate": 6.454365097034127e-06,
"loss": 0.4766,
"step": 496
},
{
"epoch": 1.4,
"grad_norm": 0.40420889188372705,
"learning_rate": 6.43866964627766e-06,
"loss": 0.4755,
"step": 497
},
{
"epoch": 1.4028169014084506,
"grad_norm": 0.44487770905469926,
"learning_rate": 6.422958724128169e-06,
"loss": 0.4899,
"step": 498
},
{
"epoch": 1.4056338028169013,
"grad_norm": 0.435043357418449,
"learning_rate": 6.40723249954025e-06,
"loss": 0.4972,
"step": 499
},
{
"epoch": 1.408450704225352,
"grad_norm": 0.4627668121771765,
"learning_rate": 6.391491141633064e-06,
"loss": 0.4798,
"step": 500
},
{
"epoch": 1.4112676056338027,
"grad_norm": 0.4070202392162525,
"learning_rate": 6.375734819688514e-06,
"loss": 0.4898,
"step": 501
},
{
"epoch": 1.4140845070422534,
"grad_norm": 0.4213085739571462,
"learning_rate": 6.359963703149424e-06,
"loss": 0.4904,
"step": 502
},
{
"epoch": 1.4169014084507041,
"grad_norm": 0.5558578544437808,
"learning_rate": 6.344177961617719e-06,
"loss": 0.4795,
"step": 503
},
{
"epoch": 1.4197183098591548,
"grad_norm": 0.4638056979361732,
"learning_rate": 6.3283777648526035e-06,
"loss": 0.4944,
"step": 504
},
{
"epoch": 1.4225352112676055,
"grad_norm": 0.44058978200259,
"learning_rate": 6.312563282768729e-06,
"loss": 0.4805,
"step": 505
},
{
"epoch": 1.4253521126760562,
"grad_norm": 0.4712406089314399,
"learning_rate": 6.296734685434373e-06,
"loss": 0.518,
"step": 506
},
{
"epoch": 1.428169014084507,
"grad_norm": 0.50303011695748,
"learning_rate": 6.280892143069607e-06,
"loss": 0.5078,
"step": 507
},
{
"epoch": 1.4309859154929576,
"grad_norm": 0.44401567500787253,
"learning_rate": 6.265035826044467e-06,
"loss": 0.4986,
"step": 508
},
{
"epoch": 1.4338028169014083,
"grad_norm": 0.45802099251252676,
"learning_rate": 6.2491659048771215e-06,
"loss": 0.4962,
"step": 509
},
{
"epoch": 1.436619718309859,
"grad_norm": 0.45900922989672704,
"learning_rate": 6.233282550232036e-06,
"loss": 0.4701,
"step": 510
},
{
"epoch": 1.43943661971831,
"grad_norm": 0.4104785858315547,
"learning_rate": 6.217385932918141e-06,
"loss": 0.4708,
"step": 511
},
{
"epoch": 1.4422535211267606,
"grad_norm": 0.4287021977710135,
"learning_rate": 6.201476223886993e-06,
"loss": 0.4788,
"step": 512
},
{
"epoch": 1.4450704225352113,
"grad_norm": 0.4428659512764066,
"learning_rate": 6.185553594230934e-06,
"loss": 0.5327,
"step": 513
},
{
"epoch": 1.447887323943662,
"grad_norm": 0.39989248720275716,
"learning_rate": 6.169618215181256e-06,
"loss": 0.4913,
"step": 514
},
{
"epoch": 1.4507042253521127,
"grad_norm": 0.4108837509780523,
"learning_rate": 6.153670258106356e-06,
"loss": 0.4905,
"step": 515
},
{
"epoch": 1.4535211267605634,
"grad_norm": 0.41341293933805207,
"learning_rate": 6.137709894509898e-06,
"loss": 0.5087,
"step": 516
},
{
"epoch": 1.4563380281690141,
"grad_norm": 0.42267988314579,
"learning_rate": 6.121737296028959e-06,
"loss": 0.4888,
"step": 517
},
{
"epoch": 1.4591549295774648,
"grad_norm": 0.4511096410354952,
"learning_rate": 6.105752634432195e-06,
"loss": 0.5161,
"step": 518
},
{
"epoch": 1.4619718309859155,
"grad_norm": 0.41852288904862534,
"learning_rate": 6.089756081617987e-06,
"loss": 0.4951,
"step": 519
},
{
"epoch": 1.4647887323943662,
"grad_norm": 0.47057489166038396,
"learning_rate": 6.073747809612591e-06,
"loss": 0.4713,
"step": 520
},
{
"epoch": 1.467605633802817,
"grad_norm": 0.40256689019530845,
"learning_rate": 6.057727990568294e-06,
"loss": 0.4911,
"step": 521
},
{
"epoch": 1.4704225352112676,
"grad_norm": 0.4510853893653407,
"learning_rate": 6.041696796761558e-06,
"loss": 0.5099,
"step": 522
},
{
"epoch": 1.4732394366197183,
"grad_norm": 0.38944003217951545,
"learning_rate": 6.025654400591167e-06,
"loss": 0.4921,
"step": 523
},
{
"epoch": 1.476056338028169,
"grad_norm": 0.3940764581766545,
"learning_rate": 6.009600974576378e-06,
"loss": 0.5037,
"step": 524
},
{
"epoch": 1.4788732394366197,
"grad_norm": 0.4026807626178998,
"learning_rate": 5.9935366913550615e-06,
"loss": 0.4854,
"step": 525
},
{
"epoch": 1.4816901408450704,
"grad_norm": 0.44187668092733434,
"learning_rate": 5.977461723681845e-06,
"loss": 0.4702,
"step": 526
},
{
"epoch": 1.4845070422535211,
"grad_norm": 0.43769176532376947,
"learning_rate": 5.961376244426256e-06,
"loss": 0.5001,
"step": 527
},
{
"epoch": 1.4873239436619718,
"grad_norm": 0.47287330095754926,
"learning_rate": 5.945280426570862e-06,
"loss": 0.4876,
"step": 528
},
{
"epoch": 1.4901408450704225,
"grad_norm": 0.428038527885128,
"learning_rate": 5.929174443209416e-06,
"loss": 0.5191,
"step": 529
},
{
"epoch": 1.4929577464788732,
"grad_norm": 0.4021956491054695,
"learning_rate": 5.913058467544984e-06,
"loss": 0.4842,
"step": 530
},
{
"epoch": 1.495774647887324,
"grad_norm": 0.448727233648992,
"learning_rate": 5.8969326728880964e-06,
"loss": 0.4866,
"step": 531
},
{
"epoch": 1.4985915492957746,
"grad_norm": 0.43755912059789587,
"learning_rate": 5.880797232654869e-06,
"loss": 0.4771,
"step": 532
},
{
"epoch": 1.5014084507042254,
"grad_norm": 0.4425452078492814,
"learning_rate": 5.8646523203651514e-06,
"loss": 0.5125,
"step": 533
},
{
"epoch": 1.504225352112676,
"grad_norm": 0.48313466807852506,
"learning_rate": 5.848498109640652e-06,
"loss": 0.4872,
"step": 534
},
{
"epoch": 1.5070422535211268,
"grad_norm": 0.47985626406062337,
"learning_rate": 5.832334774203076e-06,
"loss": 0.5105,
"step": 535
},
{
"epoch": 1.5098591549295775,
"grad_norm": 0.371740721272159,
"learning_rate": 5.8161624878722545e-06,
"loss": 0.4455,
"step": 536
},
{
"epoch": 1.5126760563380282,
"grad_norm": 0.4869175302843986,
"learning_rate": 5.799981424564275e-06,
"loss": 0.4962,
"step": 537
},
{
"epoch": 1.5154929577464789,
"grad_norm": 0.4814836246010178,
"learning_rate": 5.7837917582896145e-06,
"loss": 0.5113,
"step": 538
},
{
"epoch": 1.5183098591549296,
"grad_norm": 0.41287497701292597,
"learning_rate": 5.767593663151265e-06,
"loss": 0.4781,
"step": 539
},
{
"epoch": 1.5211267605633803,
"grad_norm": 0.5074496740090907,
"learning_rate": 5.751387313342863e-06,
"loss": 0.5008,
"step": 540
},
{
"epoch": 1.523943661971831,
"grad_norm": 0.423624893721716,
"learning_rate": 5.735172883146813e-06,
"loss": 0.5054,
"step": 541
},
{
"epoch": 1.5267605633802817,
"grad_norm": 0.3983612120471499,
"learning_rate": 5.718950546932418e-06,
"loss": 0.5119,
"step": 542
},
{
"epoch": 1.5295774647887324,
"grad_norm": 0.45566126306718424,
"learning_rate": 5.702720479154001e-06,
"loss": 0.4944,
"step": 543
},
{
"epoch": 1.532394366197183,
"grad_norm": 0.45415425821004124,
"learning_rate": 5.686482854349029e-06,
"loss": 0.5087,
"step": 544
},
{
"epoch": 1.5352112676056338,
"grad_norm": 0.418311733394643,
"learning_rate": 5.6702378471362394e-06,
"loss": 0.4881,
"step": 545
},
{
"epoch": 1.5380281690140845,
"grad_norm": 0.4618061508199756,
"learning_rate": 5.653985632213758e-06,
"loss": 0.4851,
"step": 546
},
{
"epoch": 1.5408450704225352,
"grad_norm": 0.4113372184265852,
"learning_rate": 5.637726384357222e-06,
"loss": 0.4928,
"step": 547
},
{
"epoch": 1.543661971830986,
"grad_norm": 0.44563040764122475,
"learning_rate": 5.621460278417901e-06,
"loss": 0.4677,
"step": 548
},
{
"epoch": 1.5464788732394368,
"grad_norm": 0.47662643350622824,
"learning_rate": 5.605187489320815e-06,
"loss": 0.4969,
"step": 549
},
{
"epoch": 1.5492957746478875,
"grad_norm": 0.40408999101715254,
"learning_rate": 5.588908192062858e-06,
"loss": 0.4897,
"step": 550
},
{
"epoch": 1.5521126760563382,
"grad_norm": 0.46478585160154373,
"learning_rate": 5.572622561710906e-06,
"loss": 0.477,
"step": 551
},
{
"epoch": 1.5549295774647889,
"grad_norm": 0.43970878015188386,
"learning_rate": 5.556330773399948e-06,
"loss": 0.4798,
"step": 552
},
{
"epoch": 1.5577464788732396,
"grad_norm": 0.4103181086924883,
"learning_rate": 5.54003300233119e-06,
"loss": 0.4804,
"step": 553
},
{
"epoch": 1.5605633802816903,
"grad_norm": 0.4476411103725844,
"learning_rate": 5.52372942377018e-06,
"loss": 0.5,
"step": 554
},
{
"epoch": 1.563380281690141,
"grad_norm": 0.42059975361762,
"learning_rate": 5.507420213044915e-06,
"loss": 0.4914,
"step": 555
},
{
"epoch": 1.5661971830985917,
"grad_norm": 0.4227459859510044,
"learning_rate": 5.491105545543966e-06,
"loss": 0.479,
"step": 556
},
{
"epoch": 1.5690140845070424,
"grad_norm": 0.44685775608699185,
"learning_rate": 5.474785596714581e-06,
"loss": 0.4847,
"step": 557
},
{
"epoch": 1.571830985915493,
"grad_norm": 0.40998327682835917,
"learning_rate": 5.458460542060807e-06,
"loss": 0.4553,
"step": 558
},
{
"epoch": 1.5746478873239438,
"grad_norm": 0.41568075131048016,
"learning_rate": 5.442130557141595e-06,
"loss": 0.4961,
"step": 559
},
{
"epoch": 1.5774647887323945,
"grad_norm": 0.4130684664753457,
"learning_rate": 5.42579581756892e-06,
"loss": 0.4901,
"step": 560
},
{
"epoch": 1.5802816901408452,
"grad_norm": 0.4265493303005442,
"learning_rate": 5.409456499005883e-06,
"loss": 0.473,
"step": 561
},
{
"epoch": 1.5830985915492959,
"grad_norm": 0.41210586655876397,
"learning_rate": 5.393112777164834e-06,
"loss": 0.4836,
"step": 562
},
{
"epoch": 1.5859154929577466,
"grad_norm": 0.4120627899473988,
"learning_rate": 5.376764827805468e-06,
"loss": 0.4754,
"step": 563
},
{
"epoch": 1.5887323943661973,
"grad_norm": 0.39881714263247103,
"learning_rate": 5.36041282673295e-06,
"loss": 0.4734,
"step": 564
},
{
"epoch": 1.591549295774648,
"grad_norm": 0.4225245003542499,
"learning_rate": 5.3440569497960126e-06,
"loss": 0.5043,
"step": 565
},
{
"epoch": 1.5943661971830987,
"grad_norm": 0.4437638279308617,
"learning_rate": 5.32769737288507e-06,
"loss": 0.4894,
"step": 566
},
{
"epoch": 1.5971830985915494,
"grad_norm": 0.4085471752813184,
"learning_rate": 5.3113342719303266e-06,
"loss": 0.4826,
"step": 567
},
{
"epoch": 1.6,
"grad_norm": 0.44059126729044296,
"learning_rate": 5.294967822899882e-06,
"loss": 0.4842,
"step": 568
},
{
"epoch": 1.6028169014084508,
"grad_norm": 0.4785027561352671,
"learning_rate": 5.278598201797844e-06,
"loss": 0.5029,
"step": 569
},
{
"epoch": 1.6056338028169015,
"grad_norm": 0.39271046798403997,
"learning_rate": 5.262225584662431e-06,
"loss": 0.4926,
"step": 570
},
{
"epoch": 1.6084507042253522,
"grad_norm": 0.42328960022000284,
"learning_rate": 5.245850147564081e-06,
"loss": 0.4865,
"step": 571
},
{
"epoch": 1.611267605633803,
"grad_norm": 0.45771191563988584,
"learning_rate": 5.229472066603558e-06,
"loss": 0.4764,
"step": 572
},
{
"epoch": 1.6140845070422536,
"grad_norm": 0.3867972365885545,
"learning_rate": 5.213091517910056e-06,
"loss": 0.4901,
"step": 573
},
{
"epoch": 1.6169014084507043,
"grad_norm": 0.3873468850604546,
"learning_rate": 5.196708677639311e-06,
"loss": 0.4973,
"step": 574
},
{
"epoch": 1.619718309859155,
"grad_norm": 0.36928905724949573,
"learning_rate": 5.180323721971696e-06,
"loss": 0.4653,
"step": 575
},
{
"epoch": 1.6225352112676057,
"grad_norm": 0.3855607659359868,
"learning_rate": 5.163936827110342e-06,
"loss": 0.4721,
"step": 576
},
{
"epoch": 1.6253521126760564,
"grad_norm": 0.4009987591689016,
"learning_rate": 5.1475481692792235e-06,
"loss": 0.5147,
"step": 577
},
{
"epoch": 1.628169014084507,
"grad_norm": 0.4041174927757299,
"learning_rate": 5.131157924721285e-06,
"loss": 0.4924,
"step": 578
},
{
"epoch": 1.6309859154929578,
"grad_norm": 0.3747685368431732,
"learning_rate": 5.1147662696965254e-06,
"loss": 0.4825,
"step": 579
},
{
"epoch": 1.6338028169014085,
"grad_norm": 0.4088818726448478,
"learning_rate": 5.098373380480114e-06,
"loss": 0.5083,
"step": 580
},
{
"epoch": 1.6366197183098592,
"grad_norm": 0.39419824398459724,
"learning_rate": 5.081979433360498e-06,
"loss": 0.4935,
"step": 581
},
{
"epoch": 1.63943661971831,
"grad_norm": 0.39469243440406887,
"learning_rate": 5.065584604637492e-06,
"loss": 0.4576,
"step": 582
},
{
"epoch": 1.6422535211267606,
"grad_norm": 0.414636635926328,
"learning_rate": 5.0491890706204e-06,
"loss": 0.4842,
"step": 583
},
{
"epoch": 1.6450704225352113,
"grad_norm": 0.43475222695638494,
"learning_rate": 5.0327930076261065e-06,
"loss": 0.4557,
"step": 584
},
{
"epoch": 1.647887323943662,
"grad_norm": 0.3513983093557574,
"learning_rate": 5.0163965919771855e-06,
"loss": 0.4656,
"step": 585
},
{
"epoch": 1.6507042253521127,
"grad_norm": 0.4628906667358179,
"learning_rate": 5e-06,
"loss": 0.5117,
"step": 586
},
{
"epoch": 1.6535211267605634,
"grad_norm": 0.3907044515953839,
"learning_rate": 4.983603408022817e-06,
"loss": 0.4928,
"step": 587
},
{
"epoch": 1.656338028169014,
"grad_norm": 0.4554985455916155,
"learning_rate": 4.967206992373894e-06,
"loss": 0.4957,
"step": 588
},
{
"epoch": 1.6591549295774648,
"grad_norm": 0.40510939357693265,
"learning_rate": 4.9508109293796015e-06,
"loss": 0.4916,
"step": 589
},
{
"epoch": 1.6619718309859155,
"grad_norm": 0.48375130926157284,
"learning_rate": 4.9344153953625095e-06,
"loss": 0.4853,
"step": 590
},
{
"epoch": 1.6647887323943662,
"grad_norm": 0.4937685950817898,
"learning_rate": 4.918020566639505e-06,
"loss": 0.4605,
"step": 591
},
{
"epoch": 1.667605633802817,
"grad_norm": 0.3931913794777372,
"learning_rate": 4.901626619519888e-06,
"loss": 0.4924,
"step": 592
},
{
"epoch": 1.6704225352112676,
"grad_norm": 0.4757949719740506,
"learning_rate": 4.885233730303475e-06,
"loss": 0.479,
"step": 593
},
{
"epoch": 1.6732394366197183,
"grad_norm": 0.46379868898689014,
"learning_rate": 4.868842075278717e-06,
"loss": 0.488,
"step": 594
},
{
"epoch": 1.676056338028169,
"grad_norm": 0.3881777800605308,
"learning_rate": 4.852451830720777e-06,
"loss": 0.4899,
"step": 595
},
{
"epoch": 1.6788732394366197,
"grad_norm": 0.4367216334986508,
"learning_rate": 4.83606317288966e-06,
"loss": 0.4881,
"step": 596
},
{
"epoch": 1.6816901408450704,
"grad_norm": 0.40564739437861447,
"learning_rate": 4.819676278028305e-06,
"loss": 0.4787,
"step": 597
},
{
"epoch": 1.684507042253521,
"grad_norm": 0.4252084939655642,
"learning_rate": 4.803291322360691e-06,
"loss": 0.4763,
"step": 598
},
{
"epoch": 1.6873239436619718,
"grad_norm": 0.41871066030523396,
"learning_rate": 4.7869084820899455e-06,
"loss": 0.4977,
"step": 599
},
{
"epoch": 1.6901408450704225,
"grad_norm": 0.43384887842924486,
"learning_rate": 4.7705279333964435e-06,
"loss": 0.5063,
"step": 600
},
{
"epoch": 1.6929577464788732,
"grad_norm": 0.40350171077067093,
"learning_rate": 4.75414985243592e-06,
"loss": 0.4924,
"step": 601
},
{
"epoch": 1.695774647887324,
"grad_norm": 0.36329329985026526,
"learning_rate": 4.73777441533757e-06,
"loss": 0.476,
"step": 602
},
{
"epoch": 1.6985915492957746,
"grad_norm": 0.37282495932190163,
"learning_rate": 4.721401798202157e-06,
"loss": 0.4959,
"step": 603
},
{
"epoch": 1.7014084507042253,
"grad_norm": 0.393392739621724,
"learning_rate": 4.70503217710012e-06,
"loss": 0.4813,
"step": 604
},
{
"epoch": 1.704225352112676,
"grad_norm": 0.42627654440084173,
"learning_rate": 4.688665728069676e-06,
"loss": 0.5041,
"step": 605
},
{
"epoch": 1.7070422535211267,
"grad_norm": 0.42156592000121074,
"learning_rate": 4.67230262711493e-06,
"loss": 0.4639,
"step": 606
},
{
"epoch": 1.7098591549295774,
"grad_norm": 0.37153582403937235,
"learning_rate": 4.655943050203987e-06,
"loss": 0.4933,
"step": 607
},
{
"epoch": 1.712676056338028,
"grad_norm": 0.3618346879431781,
"learning_rate": 4.63958717326705e-06,
"loss": 0.4731,
"step": 608
},
{
"epoch": 1.7154929577464788,
"grad_norm": 0.4189867275686855,
"learning_rate": 4.623235172194532e-06,
"loss": 0.48,
"step": 609
},
{
"epoch": 1.7183098591549295,
"grad_norm": 0.3544074995168518,
"learning_rate": 4.606887222835168e-06,
"loss": 0.5005,
"step": 610
},
{
"epoch": 1.7211267605633802,
"grad_norm": 0.3582551183885596,
"learning_rate": 4.590543500994118e-06,
"loss": 0.4851,
"step": 611
},
{
"epoch": 1.723943661971831,
"grad_norm": 0.3682415876242219,
"learning_rate": 4.574204182431082e-06,
"loss": 0.4855,
"step": 612
},
{
"epoch": 1.7267605633802816,
"grad_norm": 0.3978386232505586,
"learning_rate": 4.557869442858406e-06,
"loss": 0.5023,
"step": 613
},
{
"epoch": 1.7295774647887323,
"grad_norm": 0.4037363077292521,
"learning_rate": 4.541539457939194e-06,
"loss": 0.4939,
"step": 614
},
{
"epoch": 1.732394366197183,
"grad_norm": 0.4426913520017231,
"learning_rate": 4.525214403285421e-06,
"loss": 0.4967,
"step": 615
},
{
"epoch": 1.7352112676056337,
"grad_norm": 0.41553539084217284,
"learning_rate": 4.5088944544560355e-06,
"loss": 0.4898,
"step": 616
},
{
"epoch": 1.7380281690140844,
"grad_norm": 0.476787686400504,
"learning_rate": 4.4925797869550865e-06,
"loss": 0.4747,
"step": 617
},
{
"epoch": 1.7408450704225351,
"grad_norm": 0.44174190027116295,
"learning_rate": 4.476270576229823e-06,
"loss": 0.5051,
"step": 618
},
{
"epoch": 1.7436619718309858,
"grad_norm": 0.46363672177794424,
"learning_rate": 4.459966997668812e-06,
"loss": 0.4665,
"step": 619
},
{
"epoch": 1.7464788732394365,
"grad_norm": 0.35788247114147254,
"learning_rate": 4.443669226600053e-06,
"loss": 0.4902,
"step": 620
},
{
"epoch": 1.7492957746478872,
"grad_norm": 0.3992859755370345,
"learning_rate": 4.427377438289095e-06,
"loss": 0.4975,
"step": 621
},
{
"epoch": 1.752112676056338,
"grad_norm": 0.4255514294644944,
"learning_rate": 4.411091807937143e-06,
"loss": 0.481,
"step": 622
},
{
"epoch": 1.7549295774647886,
"grad_norm": 0.4210089823530368,
"learning_rate": 4.3948125106791854e-06,
"loss": 0.4771,
"step": 623
},
{
"epoch": 1.7577464788732393,
"grad_norm": 0.3906604723995445,
"learning_rate": 4.3785397215821e-06,
"loss": 0.46,
"step": 624
},
{
"epoch": 1.76056338028169,
"grad_norm": 0.3874823235263679,
"learning_rate": 4.362273615642779e-06,
"loss": 0.5002,
"step": 625
},
{
"epoch": 1.7633802816901407,
"grad_norm": 0.3901434101586165,
"learning_rate": 4.346014367786243e-06,
"loss": 0.4843,
"step": 626
},
{
"epoch": 1.7661971830985914,
"grad_norm": 0.37499873527013877,
"learning_rate": 4.329762152863762e-06,
"loss": 0.4842,
"step": 627
},
{
"epoch": 1.7690140845070421,
"grad_norm": 0.4151492021167118,
"learning_rate": 4.313517145650973e-06,
"loss": 0.4958,
"step": 628
},
{
"epoch": 1.7718309859154928,
"grad_norm": 0.35051417440023813,
"learning_rate": 4.297279520846002e-06,
"loss": 0.5058,
"step": 629
},
{
"epoch": 1.7746478873239435,
"grad_norm": 0.3916747258132481,
"learning_rate": 4.281049453067584e-06,
"loss": 0.4669,
"step": 630
},
{
"epoch": 1.7774647887323942,
"grad_norm": 0.377379436744131,
"learning_rate": 4.264827116853189e-06,
"loss": 0.4962,
"step": 631
},
{
"epoch": 1.780281690140845,
"grad_norm": 0.39444463909483923,
"learning_rate": 4.248612686657139e-06,
"loss": 0.4776,
"step": 632
},
{
"epoch": 1.7830985915492956,
"grad_norm": 0.4224260284589088,
"learning_rate": 4.232406336848734e-06,
"loss": 0.4909,
"step": 633
},
{
"epoch": 1.7859154929577463,
"grad_norm": 0.3831008836136032,
"learning_rate": 4.2162082417103855e-06,
"loss": 0.4927,
"step": 634
},
{
"epoch": 1.788732394366197,
"grad_norm": 0.37541501249859294,
"learning_rate": 4.200018575435726e-06,
"loss": 0.4863,
"step": 635
},
{
"epoch": 1.7915492957746477,
"grad_norm": 0.4326695660097326,
"learning_rate": 4.183837512127747e-06,
"loss": 0.4989,
"step": 636
},
{
"epoch": 1.7943661971830986,
"grad_norm": 0.38404026793531815,
"learning_rate": 4.167665225796925e-06,
"loss": 0.4592,
"step": 637
},
{
"epoch": 1.7971830985915493,
"grad_norm": 0.4162688038323106,
"learning_rate": 4.1515018903593485e-06,
"loss": 0.4807,
"step": 638
},
{
"epoch": 1.8,
"grad_norm": 0.4278400322795686,
"learning_rate": 4.135347679634849e-06,
"loss": 0.4815,
"step": 639
},
{
"epoch": 1.8028169014084507,
"grad_norm": 0.40764842876105817,
"learning_rate": 4.119202767345132e-06,
"loss": 0.5008,
"step": 640
},
{
"epoch": 1.8056338028169014,
"grad_norm": 0.42716211859348846,
"learning_rate": 4.103067327111905e-06,
"loss": 0.4922,
"step": 641
},
{
"epoch": 1.8084507042253521,
"grad_norm": 0.45566066287532137,
"learning_rate": 4.086941532455017e-06,
"loss": 0.4967,
"step": 642
},
{
"epoch": 1.8112676056338028,
"grad_norm": 0.44375813526255004,
"learning_rate": 4.070825556790587e-06,
"loss": 0.497,
"step": 643
},
{
"epoch": 1.8140845070422535,
"grad_norm": 0.38051175030231293,
"learning_rate": 4.05471957342914e-06,
"loss": 0.4742,
"step": 644
},
{
"epoch": 1.8169014084507042,
"grad_norm": 0.42575239690946376,
"learning_rate": 4.0386237555737476e-06,
"loss": 0.5048,
"step": 645
},
{
"epoch": 1.819718309859155,
"grad_norm": 0.37163029748077847,
"learning_rate": 4.022538276318156e-06,
"loss": 0.4837,
"step": 646
},
{
"epoch": 1.8225352112676056,
"grad_norm": 0.45165782259780374,
"learning_rate": 4.006463308644939e-06,
"loss": 0.4921,
"step": 647
},
{
"epoch": 1.8253521126760563,
"grad_norm": 0.455006281945947,
"learning_rate": 3.990399025423622e-06,
"loss": 0.4583,
"step": 648
},
{
"epoch": 1.828169014084507,
"grad_norm": 0.4079794708534016,
"learning_rate": 3.974345599408833e-06,
"loss": 0.5085,
"step": 649
},
{
"epoch": 1.8309859154929577,
"grad_norm": 0.40865373266918004,
"learning_rate": 3.958303203238443e-06,
"loss": 0.476,
"step": 650
},
{
"epoch": 1.8338028169014085,
"grad_norm": 0.4184354451550438,
"learning_rate": 3.942272009431707e-06,
"loss": 0.4589,
"step": 651
},
{
"epoch": 1.8366197183098592,
"grad_norm": 0.49668041654230144,
"learning_rate": 3.92625219038741e-06,
"loss": 0.5024,
"step": 652
},
{
"epoch": 1.8394366197183099,
"grad_norm": 0.3487062852877808,
"learning_rate": 3.910243918382015e-06,
"loss": 0.4815,
"step": 653
},
{
"epoch": 1.8422535211267606,
"grad_norm": 0.4120415306391501,
"learning_rate": 3.894247365567806e-06,
"loss": 0.4795,
"step": 654
},
{
"epoch": 1.8450704225352113,
"grad_norm": 0.4477361963322955,
"learning_rate": 3.878262703971043e-06,
"loss": 0.4957,
"step": 655
},
{
"epoch": 1.847887323943662,
"grad_norm": 0.429401841610224,
"learning_rate": 3.8622901054901045e-06,
"loss": 0.4841,
"step": 656
},
{
"epoch": 1.8507042253521127,
"grad_norm": 0.3942562812288331,
"learning_rate": 3.846329741893646e-06,
"loss": 0.4795,
"step": 657
},
{
"epoch": 1.8535211267605634,
"grad_norm": 0.426170025102285,
"learning_rate": 3.830381784818746e-06,
"loss": 0.5068,
"step": 658
},
{
"epoch": 1.856338028169014,
"grad_norm": 0.38804163901918054,
"learning_rate": 3.814446405769069e-06,
"loss": 0.4835,
"step": 659
},
{
"epoch": 1.8591549295774648,
"grad_norm": 0.3540589772905739,
"learning_rate": 3.7985237761130077e-06,
"loss": 0.4929,
"step": 660
},
{
"epoch": 1.8619718309859155,
"grad_norm": 0.42357776241410305,
"learning_rate": 3.7826140670818597e-06,
"loss": 0.504,
"step": 661
},
{
"epoch": 1.8647887323943662,
"grad_norm": 0.3729025016669601,
"learning_rate": 3.766717449767965e-06,
"loss": 0.4777,
"step": 662
},
{
"epoch": 1.8676056338028169,
"grad_norm": 0.3889169801805458,
"learning_rate": 3.75083409512288e-06,
"loss": 0.515,
"step": 663
},
{
"epoch": 1.8704225352112676,
"grad_norm": 0.38907663234098605,
"learning_rate": 3.7349641739555342e-06,
"loss": 0.4881,
"step": 664
},
{
"epoch": 1.8732394366197183,
"grad_norm": 0.38796929384532813,
"learning_rate": 3.719107856930395e-06,
"loss": 0.4917,
"step": 665
},
{
"epoch": 1.8760563380281692,
"grad_norm": 0.3986305355401847,
"learning_rate": 3.703265314565629e-06,
"loss": 0.5089,
"step": 666
},
{
"epoch": 1.8788732394366199,
"grad_norm": 0.347146307991803,
"learning_rate": 3.687436717231273e-06,
"loss": 0.4766,
"step": 667
},
{
"epoch": 1.8816901408450706,
"grad_norm": 0.40539428665594573,
"learning_rate": 3.6716222351473986e-06,
"loss": 0.5021,
"step": 668
},
{
"epoch": 1.8845070422535213,
"grad_norm": 0.38842403728574076,
"learning_rate": 3.6558220383822824e-06,
"loss": 0.5044,
"step": 669
},
{
"epoch": 1.887323943661972,
"grad_norm": 0.39954575200830245,
"learning_rate": 3.6400362968505776e-06,
"loss": 0.4882,
"step": 670
},
{
"epoch": 1.8901408450704227,
"grad_norm": 0.41445903325977895,
"learning_rate": 3.6242651803114876e-06,
"loss": 0.4994,
"step": 671
},
{
"epoch": 1.8929577464788734,
"grad_norm": 0.38275982826221505,
"learning_rate": 3.6085088583669368e-06,
"loss": 0.4899,
"step": 672
},
{
"epoch": 1.895774647887324,
"grad_norm": 0.3574431084312652,
"learning_rate": 3.59276750045975e-06,
"loss": 0.4829,
"step": 673
},
{
"epoch": 1.8985915492957748,
"grad_norm": 0.3788946557536076,
"learning_rate": 3.5770412758718317e-06,
"loss": 0.4916,
"step": 674
},
{
"epoch": 1.9014084507042255,
"grad_norm": 0.42134326422621154,
"learning_rate": 3.56133035372234e-06,
"loss": 0.4608,
"step": 675
},
{
"epoch": 1.9042253521126762,
"grad_norm": 0.4177754485864574,
"learning_rate": 3.545634902965873e-06,
"loss": 0.4973,
"step": 676
},
{
"epoch": 1.9070422535211269,
"grad_norm": 0.37996812865010277,
"learning_rate": 3.52995509239065e-06,
"loss": 0.4847,
"step": 677
},
{
"epoch": 1.9098591549295776,
"grad_norm": 0.36341888349352885,
"learning_rate": 3.514291090616696e-06,
"loss": 0.4928,
"step": 678
},
{
"epoch": 1.9126760563380283,
"grad_norm": 0.3586723850556533,
"learning_rate": 3.4986430660940283e-06,
"loss": 0.4589,
"step": 679
},
{
"epoch": 1.915492957746479,
"grad_norm": 0.41045984913560996,
"learning_rate": 3.483011187100847e-06,
"loss": 0.4768,
"step": 680
},
{
"epoch": 1.9183098591549297,
"grad_norm": 0.3797315202775918,
"learning_rate": 3.4673956217417228e-06,
"loss": 0.4816,
"step": 681
},
{
"epoch": 1.9211267605633804,
"grad_norm": 0.39011901642864444,
"learning_rate": 3.451796537945791e-06,
"loss": 0.5053,
"step": 682
},
{
"epoch": 1.923943661971831,
"grad_norm": 0.4068699128633064,
"learning_rate": 3.4362141034649434e-06,
"loss": 0.4715,
"step": 683
},
{
"epoch": 1.9267605633802818,
"grad_norm": 0.40734934524843114,
"learning_rate": 3.4206484858720267e-06,
"loss": 0.485,
"step": 684
},
{
"epoch": 1.9295774647887325,
"grad_norm": 0.4177722988348098,
"learning_rate": 3.4050998525590406e-06,
"loss": 0.4897,
"step": 685
},
{
"epoch": 1.9323943661971832,
"grad_norm": 0.35772887554141514,
"learning_rate": 3.389568370735332e-06,
"loss": 0.4658,
"step": 686
},
{
"epoch": 1.935211267605634,
"grad_norm": 0.382070573657635,
"learning_rate": 3.374054207425811e-06,
"loss": 0.452,
"step": 687
},
{
"epoch": 1.9380281690140846,
"grad_norm": 0.37319547891516613,
"learning_rate": 3.3585575294691355e-06,
"loss": 0.4777,
"step": 688
},
{
"epoch": 1.9408450704225353,
"grad_norm": 0.37543346487112356,
"learning_rate": 3.3430785035159297e-06,
"loss": 0.4954,
"step": 689
},
{
"epoch": 1.943661971830986,
"grad_norm": 0.4343137243095733,
"learning_rate": 3.327617296026991e-06,
"loss": 0.4757,
"step": 690
},
{
"epoch": 1.9464788732394367,
"grad_norm": 0.4113551727906176,
"learning_rate": 3.3121740732714953e-06,
"loss": 0.4908,
"step": 691
},
{
"epoch": 1.9492957746478874,
"grad_norm": 0.4150173708033274,
"learning_rate": 3.296749001325212e-06,
"loss": 0.4855,
"step": 692
},
{
"epoch": 1.952112676056338,
"grad_norm": 0.39769439667103085,
"learning_rate": 3.2813422460687176e-06,
"loss": 0.5082,
"step": 693
},
{
"epoch": 1.9549295774647888,
"grad_norm": 0.37230509461533484,
"learning_rate": 3.265953973185613e-06,
"loss": 0.4771,
"step": 694
},
{
"epoch": 1.9577464788732395,
"grad_norm": 0.3921749151984685,
"learning_rate": 3.250584348160738e-06,
"loss": 0.4842,
"step": 695
},
{
"epoch": 1.9605633802816902,
"grad_norm": 0.39815159280717677,
"learning_rate": 3.235233536278396e-06,
"loss": 0.4876,
"step": 696
},
{
"epoch": 1.963380281690141,
"grad_norm": 0.4078684990729484,
"learning_rate": 3.2199017026205744e-06,
"loss": 0.459,
"step": 697
},
{
"epoch": 1.9661971830985916,
"grad_norm": 0.4003910325405714,
"learning_rate": 3.204589012065168e-06,
"loss": 0.4895,
"step": 698
},
{
"epoch": 1.9690140845070423,
"grad_norm": 0.4098497177330705,
"learning_rate": 3.1892956292842103e-06,
"loss": 0.5077,
"step": 699
},
{
"epoch": 1.971830985915493,
"grad_norm": 0.411890736572199,
"learning_rate": 3.1740217187420947e-06,
"loss": 0.4671,
"step": 700
},
{
"epoch": 1.9746478873239437,
"grad_norm": 0.37605262945047896,
"learning_rate": 3.158767444693819e-06,
"loss": 0.4934,
"step": 701
},
{
"epoch": 1.9774647887323944,
"grad_norm": 0.4119794673411224,
"learning_rate": 3.1435329711832042e-06,
"loss": 0.482,
"step": 702
},
{
"epoch": 1.980281690140845,
"grad_norm": 0.38815162205345255,
"learning_rate": 3.1283184620411387e-06,
"loss": 0.482,
"step": 703
},
{
"epoch": 1.9830985915492958,
"grad_norm": 0.3849898691145318,
"learning_rate": 3.1131240808838175e-06,
"loss": 0.4847,
"step": 704
},
{
"epoch": 1.9859154929577465,
"grad_norm": 0.36667596347362735,
"learning_rate": 3.0979499911109768e-06,
"loss": 0.4687,
"step": 705
},
{
"epoch": 1.9887323943661972,
"grad_norm": 0.3778514259931487,
"learning_rate": 3.082796355904142e-06,
"loss": 0.4577,
"step": 706
},
{
"epoch": 1.991549295774648,
"grad_norm": 0.3770331164124935,
"learning_rate": 3.06766333822487e-06,
"loss": 0.5002,
"step": 707
},
{
"epoch": 1.9943661971830986,
"grad_norm": 0.34604698551150276,
"learning_rate": 3.052551100812998e-06,
"loss": 0.4875,
"step": 708
},
{
"epoch": 1.9971830985915493,
"grad_norm": 0.3631995089390127,
"learning_rate": 3.0374598061848936e-06,
"loss": 0.5077,
"step": 709
},
{
"epoch": 2.0,
"grad_norm": 0.37299214959244026,
"learning_rate": 3.022389616631706e-06,
"loss": 0.4918,
"step": 710
},
{
"epoch": 2.0028169014084507,
"grad_norm": 0.4263218715974723,
"learning_rate": 3.0073406942176214e-06,
"loss": 0.4662,
"step": 711
},
{
"epoch": 2.0056338028169014,
"grad_norm": 0.388952450173628,
"learning_rate": 2.9923132007781206e-06,
"loss": 0.4583,
"step": 712
},
{
"epoch": 2.008450704225352,
"grad_norm": 0.3716230490095555,
"learning_rate": 2.977307297918235e-06,
"loss": 0.452,
"step": 713
},
{
"epoch": 2.011267605633803,
"grad_norm": 0.3472437688297708,
"learning_rate": 2.9623231470108194e-06,
"loss": 0.465,
"step": 714
},
{
"epoch": 2.0140845070422535,
"grad_norm": 0.40516520267399664,
"learning_rate": 2.947360909194801e-06,
"loss": 0.4757,
"step": 715
},
{
"epoch": 2.016901408450704,
"grad_norm": 0.4275349577946564,
"learning_rate": 2.9324207453734575e-06,
"loss": 0.4477,
"step": 716
},
{
"epoch": 2.019718309859155,
"grad_norm": 0.4150387078995605,
"learning_rate": 2.917502816212685e-06,
"loss": 0.4516,
"step": 717
},
{
"epoch": 2.0225352112676056,
"grad_norm": 0.4258725320630231,
"learning_rate": 2.902607282139267e-06,
"loss": 0.4485,
"step": 718
},
{
"epoch": 2.0253521126760563,
"grad_norm": 0.37906226457482306,
"learning_rate": 2.8877343033391523e-06,
"loss": 0.4458,
"step": 719
},
{
"epoch": 2.028169014084507,
"grad_norm": 0.4017366735290866,
"learning_rate": 2.8728840397557324e-06,
"loss": 0.4533,
"step": 720
},
{
"epoch": 2.0309859154929577,
|
"grad_norm": 0.4636710305852195, |
|
"learning_rate": 2.8580566510881158e-06, |
|
"loss": 0.4513, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.0338028169014084, |
|
"grad_norm": 0.40403337840200826, |
|
"learning_rate": 2.8432522967894217e-06, |
|
"loss": 0.4466, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.036619718309859, |
|
"grad_norm": 0.3839776520892033, |
|
"learning_rate": 2.8284711360650517e-06, |
|
"loss": 0.4582, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.03943661971831, |
|
"grad_norm": 0.3957971040790153, |
|
"learning_rate": 2.8137133278709917e-06, |
|
"loss": 0.4673, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.0422535211267605, |
|
"grad_norm": 0.4511172081773506, |
|
"learning_rate": 2.7989790309120895e-06, |
|
"loss": 0.4661, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.045070422535211, |
|
"grad_norm": 0.42321807954410795, |
|
"learning_rate": 2.7842684036403557e-06, |
|
"loss": 0.4596, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.047887323943662, |
|
"grad_norm": 0.4016823973163568, |
|
"learning_rate": 2.769581604253262e-06, |
|
"loss": 0.4572, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.0507042253521126, |
|
"grad_norm": 0.35731206857199754, |
|
"learning_rate": 2.754918790692031e-06, |
|
"loss": 0.4496, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.0535211267605633, |
|
"grad_norm": 0.3760759988713752, |
|
"learning_rate": 2.7402801206399454e-06, |
|
"loss": 0.4627, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.056338028169014, |
|
"grad_norm": 0.36312345984169353, |
|
"learning_rate": 2.7256657515206487e-06, |
|
"loss": 0.4558, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.0591549295774647, |
|
"grad_norm": 0.3660535112601631, |
|
"learning_rate": 2.7110758404964534e-06, |
|
"loss": 0.4655, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.0619718309859154, |
|
"grad_norm": 0.39210025358373574, |
|
"learning_rate": 2.6965105444666496e-06, |
|
"loss": 0.4609, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.064788732394366, |
|
"grad_norm": 0.32546400486520366, |
|
"learning_rate": 2.6819700200658204e-06, |
|
"loss": 0.4469, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.067605633802817, |
|
"grad_norm": 0.41216043411606135, |
|
"learning_rate": 2.667454423662156e-06, |
|
"loss": 0.4491, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.0704225352112675, |
|
"grad_norm": 0.37826214280359827, |
|
"learning_rate": 2.6529639113557694e-06, |
|
"loss": 0.473, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.073239436619718, |
|
"grad_norm": 0.3872061532277833, |
|
"learning_rate": 2.638498638977024e-06, |
|
"loss": 0.4462, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.076056338028169, |
|
"grad_norm": 0.35777642541297033, |
|
"learning_rate": 2.6240587620848512e-06, |
|
"loss": 0.4545, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.0788732394366196, |
|
"grad_norm": 0.3771646503674181, |
|
"learning_rate": 2.6096444359650817e-06, |
|
"loss": 0.4546, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.0816901408450703, |
|
"grad_norm": 0.369712629785824, |
|
"learning_rate": 2.595255815628774e-06, |
|
"loss": 0.426, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.084507042253521, |
|
"grad_norm": 0.3948340446779534, |
|
"learning_rate": 2.580893055810545e-06, |
|
"loss": 0.4396, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.0873239436619717, |
|
"grad_norm": 0.362237396122223, |
|
"learning_rate": 2.5665563109669155e-06, |
|
"loss": 0.4572, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.0901408450704224, |
|
"grad_norm": 0.35377063841196926, |
|
"learning_rate": 2.5522457352746368e-06, |
|
"loss": 0.4484, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.092957746478873, |
|
"grad_norm": 0.37686845068586383, |
|
"learning_rate": 2.5379614826290384e-06, |
|
"loss": 0.4659, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.095774647887324, |
|
"grad_norm": 0.35203810361621596, |
|
"learning_rate": 2.5237037066423747e-06, |
|
"loss": 0.4473, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.0985915492957745, |
|
"grad_norm": 0.3591372556166686, |
|
"learning_rate": 2.509472560642171e-06, |
|
"loss": 0.4597, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.101408450704225, |
|
"grad_norm": 0.3708152047071068, |
|
"learning_rate": 2.495268197669573e-06, |
|
"loss": 0.453, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.104225352112676, |
|
"grad_norm": 0.3902747213222871, |
|
"learning_rate": 2.481090770477706e-06, |
|
"loss": 0.436, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.1070422535211266, |
|
"grad_norm": 0.3887109344687831, |
|
"learning_rate": 2.466940431530026e-06, |
|
"loss": 0.4295, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.1098591549295773, |
|
"grad_norm": 0.3958388402882215, |
|
"learning_rate": 2.4528173329986855e-06, |
|
"loss": 0.4481, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.112676056338028, |
|
"grad_norm": 0.35749353391449623, |
|
"learning_rate": 2.438721626762892e-06, |
|
"loss": 0.443, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.1154929577464787, |
|
"grad_norm": 0.39476997529270275, |
|
"learning_rate": 2.424653464407281e-06, |
|
"loss": 0.4846, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.1183098591549294, |
|
"grad_norm": 0.3575214685484145, |
|
"learning_rate": 2.4106129972202793e-06, |
|
"loss": 0.4464, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.12112676056338, |
|
"grad_norm": 0.3768049884226859, |
|
"learning_rate": 2.3966003761924816e-06, |
|
"loss": 0.466, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.123943661971831, |
|
"grad_norm": 0.4018878366687561, |
|
"learning_rate": 2.382615752015028e-06, |
|
"loss": 0.4601, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.1267605633802815, |
|
"grad_norm": 0.40719785808826275, |
|
"learning_rate": 2.3686592750779788e-06, |
|
"loss": 0.4441, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.129577464788732, |
|
"grad_norm": 0.3440143155418727, |
|
"learning_rate": 2.3547310954687018e-06, |
|
"loss": 0.477, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.132394366197183, |
|
"grad_norm": 0.35333903604899564, |
|
"learning_rate": 2.340831362970257e-06, |
|
"loss": 0.462, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.1352112676056336, |
|
"grad_norm": 0.40408511951747045, |
|
"learning_rate": 2.326960227059784e-06, |
|
"loss": 0.4593, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.1380281690140843, |
|
"grad_norm": 0.38619122486168744, |
|
"learning_rate": 2.313117836906897e-06, |
|
"loss": 0.4373, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.140845070422535, |
|
"grad_norm": 0.411758597632312, |
|
"learning_rate": 2.2993043413720784e-06, |
|
"loss": 0.4655, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.1436619718309857, |
|
"grad_norm": 0.3576311304270278, |
|
"learning_rate": 2.28551988900508e-06, |
|
"loss": 0.4465, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.1464788732394364, |
|
"grad_norm": 0.3357550677548821, |
|
"learning_rate": 2.271764628043324e-06, |
|
"loss": 0.4769, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.149295774647887, |
|
"grad_norm": 0.37905960503822633, |
|
"learning_rate": 2.258038706410311e-06, |
|
"loss": 0.4319, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.152112676056338, |
|
"grad_norm": 0.385174993785128, |
|
"learning_rate": 2.2443422717140246e-06, |
|
"loss": 0.4548, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.1549295774647885, |
|
"grad_norm": 0.37165354454365357, |
|
"learning_rate": 2.2306754712453504e-06, |
|
"loss": 0.4635, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.1577464788732392, |
|
"grad_norm": 0.36455549759311245, |
|
"learning_rate": 2.217038451976485e-06, |
|
"loss": 0.4446, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.1605633802816904, |
|
"grad_norm": 0.36114868485444035, |
|
"learning_rate": 2.203431360559365e-06, |
|
"loss": 0.4464, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.1633802816901406, |
|
"grad_norm": 0.37195290477502, |
|
"learning_rate": 2.1898543433240787e-06, |
|
"loss": 0.4566, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.1661971830985918, |
|
"grad_norm": 0.35611893011878004, |
|
"learning_rate": 2.1763075462773002e-06, |
|
"loss": 0.4405, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.169014084507042, |
|
"grad_norm": 0.3568351811276538, |
|
"learning_rate": 2.1627911151007176e-06, |
|
"loss": 0.4468, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.171830985915493, |
|
"grad_norm": 0.37439678125672593, |
|
"learning_rate": 2.149305195149463e-06, |
|
"loss": 0.4479, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.1746478873239434, |
|
"grad_norm": 0.34449509209968837, |
|
"learning_rate": 2.135849931450555e-06, |
|
"loss": 0.4565, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.1774647887323946, |
|
"grad_norm": 0.3525754840068429, |
|
"learning_rate": 2.1224254687013347e-06, |
|
"loss": 0.4648, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.1802816901408453, |
|
"grad_norm": 0.3533752171342116, |
|
"learning_rate": 2.1090319512679115e-06, |
|
"loss": 0.4611, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.183098591549296, |
|
"grad_norm": 0.37886186027245655, |
|
"learning_rate": 2.0956695231836094e-06, |
|
"loss": 0.4603, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.1859154929577467, |
|
"grad_norm": 0.3733834136596295, |
|
"learning_rate": 2.0823383281474202e-06, |
|
"loss": 0.4351, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.1887323943661974, |
|
"grad_norm": 0.3728175824778924, |
|
"learning_rate": 2.0690385095224557e-06, |
|
"loss": 0.4788, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.191549295774648, |
|
"grad_norm": 0.33738192713218457, |
|
"learning_rate": 2.0557702103344078e-06, |
|
"loss": 0.4627, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.1943661971830988, |
|
"grad_norm": 0.3415766698066426, |
|
"learning_rate": 2.0425335732700075e-06, |
|
"loss": 0.4494, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.1971830985915495, |
|
"grad_norm": 0.35037808653487684, |
|
"learning_rate": 2.0293287406754976e-06, |
|
"loss": 0.4618, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.3635106636596117, |
|
"learning_rate": 2.0161558545550925e-06, |
|
"loss": 0.458, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.202816901408451, |
|
"grad_norm": 0.3305404475827585, |
|
"learning_rate": 2.0030150565694566e-06, |
|
"loss": 0.4566, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.2056338028169016, |
|
"grad_norm": 0.36482202121638047, |
|
"learning_rate": 1.989906488034181e-06, |
|
"loss": 0.4296, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.2084507042253523, |
|
"grad_norm": 0.38281315419014894, |
|
"learning_rate": 1.976830289918261e-06, |
|
"loss": 0.4497, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.211267605633803, |
|
"grad_norm": 0.3401713857213208, |
|
"learning_rate": 1.9637866028425835e-06, |
|
"loss": 0.4579, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.2140845070422537, |
|
"grad_norm": 0.38608594850184313, |
|
"learning_rate": 1.950775567078411e-06, |
|
"loss": 0.4632, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.2169014084507044, |
|
"grad_norm": 0.377240139541956, |
|
"learning_rate": 1.9377973225458773e-06, |
|
"loss": 0.4448, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.219718309859155, |
|
"grad_norm": 0.3486255139361687, |
|
"learning_rate": 1.924852008812479e-06, |
|
"loss": 0.4786, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.222535211267606, |
|
"grad_norm": 0.35076148165325083, |
|
"learning_rate": 1.9119397650915774e-06, |
|
"loss": 0.4462, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.2253521126760565, |
|
"grad_norm": 0.3531332691720524, |
|
"learning_rate": 1.8990607302408991e-06, |
|
"loss": 0.456, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.228169014084507, |
|
"grad_norm": 0.3366858659315378, |
|
"learning_rate": 1.8862150427610453e-06, |
|
"loss": 0.4326, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.230985915492958, |
|
"grad_norm": 0.3636085103892985, |
|
"learning_rate": 1.8734028407940003e-06, |
|
"loss": 0.4444, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.2338028169014086, |
|
"grad_norm": 0.38948440503258513, |
|
"learning_rate": 1.8606242621216443e-06, |
|
"loss": 0.4569, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.2366197183098593, |
|
"grad_norm": 0.3780810617916765, |
|
"learning_rate": 1.84787944416428e-06, |
|
"loss": 0.4694, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.23943661971831, |
|
"grad_norm": 0.3603299392400602, |
|
"learning_rate": 1.835168523979144e-06, |
|
"loss": 0.447, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.2422535211267607, |
|
"grad_norm": 0.38407466446122274, |
|
"learning_rate": 1.822491638258938e-06, |
|
"loss": 0.4648, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.2450704225352114, |
|
"grad_norm": 0.37915726449650966, |
|
"learning_rate": 1.8098489233303595e-06, |
|
"loss": 0.4677, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.247887323943662, |
|
"grad_norm": 0.39145196282091305, |
|
"learning_rate": 1.7972405151526339e-06, |
|
"loss": 0.4641, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.250704225352113, |
|
"grad_norm": 0.34475277984897656, |
|
"learning_rate": 1.7846665493160548e-06, |
|
"loss": 0.4529, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.2535211267605635, |
|
"grad_norm": 0.3704879117075785, |
|
"learning_rate": 1.7721271610405232e-06, |
|
"loss": 0.4305, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.256338028169014, |
|
"grad_norm": 0.37161976738203256, |
|
"learning_rate": 1.7596224851740938e-06, |
|
"loss": 0.4825, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.259154929577465, |
|
"grad_norm": 0.3524012690682922, |
|
"learning_rate": 1.747152656191527e-06, |
|
"loss": 0.4266, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.2619718309859156, |
|
"grad_norm": 0.3753464662024848, |
|
"learning_rate": 1.7347178081928407e-06, |
|
"loss": 0.4544, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.2647887323943663, |
|
"grad_norm": 0.34831747174644373, |
|
"learning_rate": 1.7223180749018675e-06, |
|
"loss": 0.4749, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.267605633802817, |
|
"grad_norm": 0.3790722784544341, |
|
"learning_rate": 1.7099535896648205e-06, |
|
"loss": 0.4571, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.2704225352112677, |
|
"grad_norm": 0.3529967844043124, |
|
"learning_rate": 1.6976244854488545e-06, |
|
"loss": 0.4538, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.2732394366197184, |
|
"grad_norm": 0.36580993633176867, |
|
"learning_rate": 1.6853308948406387e-06, |
|
"loss": 0.4559, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.276056338028169, |
|
"grad_norm": 0.33773543663604155, |
|
"learning_rate": 1.6730729500449322e-06, |
|
"loss": 0.4491, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.27887323943662, |
|
"grad_norm": 0.3818724429980544, |
|
"learning_rate": 1.6608507828831572e-06, |
|
"loss": 0.4471, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.2816901408450705, |
|
"grad_norm": 0.3941087744995697, |
|
"learning_rate": 1.648664524791988e-06, |
|
"loss": 0.4477, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.284507042253521, |
|
"grad_norm": 0.38090837598627414, |
|
"learning_rate": 1.636514306821933e-06, |
|
"loss": 0.4517, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.287323943661972, |
|
"grad_norm": 0.34949670113720865, |
|
"learning_rate": 1.6244002596359255e-06, |
|
"loss": 0.4522, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.2901408450704226, |
|
"grad_norm": 0.3547350758000349, |
|
"learning_rate": 1.6123225135079212e-06, |
|
"loss": 0.4562, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.2929577464788733, |
|
"grad_norm": 0.38160013725900366, |
|
"learning_rate": 1.6002811983214962e-06, |
|
"loss": 0.4669, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.295774647887324, |
|
"grad_norm": 0.35170542214837286, |
|
"learning_rate": 1.5882764435684473e-06, |
|
"loss": 0.4547, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.2985915492957747, |
|
"grad_norm": 0.3465195660150116, |
|
"learning_rate": 1.5763083783474048e-06, |
|
"loss": 0.4701, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.3014084507042254, |
|
"grad_norm": 0.38491085471157266, |
|
"learning_rate": 1.5643771313624394e-06, |
|
"loss": 0.4272, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.304225352112676, |
|
"grad_norm": 0.3759115395560408, |
|
"learning_rate": 1.552482830921681e-06, |
|
"loss": 0.4707, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.307042253521127, |
|
"grad_norm": 0.3594305645877611, |
|
"learning_rate": 1.5406256049359359e-06, |
|
"loss": 0.4775, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.3098591549295775, |
|
"grad_norm": 0.33521577112471695, |
|
"learning_rate": 1.5288055809173174e-06, |
|
"loss": 0.4516, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.312676056338028, |
|
"grad_norm": 0.3674079420964908, |
|
"learning_rate": 1.517022885977868e-06, |
|
"loss": 0.4617, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.315492957746479, |
|
"grad_norm": 0.34186215662243075, |
|
"learning_rate": 1.5052776468281933e-06, |
|
"loss": 0.4335, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.3183098591549296, |
|
"grad_norm": 0.3958145612608977, |
|
"learning_rate": 1.4935699897761031e-06, |
|
"loss": 0.4662, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.3211267605633803, |
|
"grad_norm": 0.3898936062833255, |
|
"learning_rate": 1.4819000407252498e-06, |
|
"loss": 0.4604, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.323943661971831, |
|
"grad_norm": 0.3566184175212025, |
|
"learning_rate": 1.4702679251737768e-06, |
|
"loss": 0.4691, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.3267605633802817, |
|
"grad_norm": 0.3556640563120501, |
|
"learning_rate": 1.4586737682129653e-06, |
|
"loss": 0.4419, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.3295774647887324, |
|
"grad_norm": 0.3482765499624595, |
|
"learning_rate": 1.4471176945258947e-06, |
|
"loss": 0.464, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.332394366197183, |
|
"grad_norm": 0.37652126714004625, |
|
"learning_rate": 1.435599828386095e-06, |
|
"loss": 0.4478, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.335211267605634, |
|
"grad_norm": 0.34935552553112986, |
|
"learning_rate": 1.4241202936562164e-06, |
|
"loss": 0.4529, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.3380281690140845, |
|
"grad_norm": 0.319947879082873, |
|
"learning_rate": 1.412679213786694e-06, |
|
"loss": 0.4521, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.340845070422535, |
|
"grad_norm": 0.36668567938711244, |
|
"learning_rate": 1.40127671181442e-06, |
|
"loss": 0.4502, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.343661971830986, |
|
"grad_norm": 0.3408592392806045, |
|
"learning_rate": 1.3899129103614229e-06, |
|
"loss": 0.4566, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.3464788732394366, |
|
"grad_norm": 0.3670600399372913, |
|
"learning_rate": 1.3785879316335448e-06, |
|
"loss": 0.4362, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.3492957746478873, |
|
"grad_norm": 0.3542407917474685, |
|
"learning_rate": 1.3673018974191354e-06, |
|
"loss": 0.4475, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.352112676056338, |
|
"grad_norm": 0.3510002567312064, |
|
"learning_rate": 1.3560549290877318e-06, |
|
"loss": 0.4598, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.3549295774647887, |
|
"grad_norm": 0.3547228481357092, |
|
"learning_rate": 1.3448471475887587e-06, |
|
"loss": 0.4728, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.3577464788732394, |
|
"grad_norm": 0.345967228254802, |
|
"learning_rate": 1.3336786734502294e-06, |
|
"loss": 0.4392, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.36056338028169, |
|
"grad_norm": 0.3614460954714215, |
|
"learning_rate": 1.3225496267774452e-06, |
|
"loss": 0.4492, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.363380281690141, |
|
"grad_norm": 0.364394650984224, |
|
"learning_rate": 1.311460127251708e-06, |
|
"loss": 0.4733, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.3661971830985915, |
|
"grad_norm": 0.35019486310976833, |
|
"learning_rate": 1.3004102941290297e-06, |
|
"loss": 0.4441, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.369014084507042, |
|
"grad_norm": 0.3706637778515585, |
|
"learning_rate": 1.2894002462388533e-06, |
|
"loss": 0.4575, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.371830985915493, |
|
"grad_norm": 0.32959072572438813, |
|
"learning_rate": 1.2784301019827705e-06, |
|
"loss": 0.4596, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.3746478873239436, |
|
"grad_norm": 0.3424607431994825, |
|
"learning_rate": 1.2674999793332539e-06, |
|
"loss": 0.4431, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.3774647887323943, |
|
"grad_norm": 0.3676756813732258, |
|
"learning_rate": 1.2566099958323824e-06, |
|
"loss": 0.4356, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.380281690140845, |
|
"grad_norm": 0.36058800592394114, |
|
"learning_rate": 1.2457602685905834e-06, |
|
"loss": 0.4316, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.3830985915492957, |
|
"grad_norm": 0.3585818367347638, |
|
"learning_rate": 1.2349509142853672e-06, |
|
"loss": 0.4674, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.3859154929577464, |
|
"grad_norm": 0.34790992953742333, |
|
"learning_rate": 1.224182049160077e-06, |
|
"loss": 0.4646, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.388732394366197, |
|
"grad_norm": 0.36177326432975354, |
|
"learning_rate": 1.2134537890226366e-06, |
|
"loss": 0.4697, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.391549295774648, |
|
"grad_norm": 0.3503908592783261, |
|
"learning_rate": 1.202766249244306e-06, |
|
"loss": 0.4678, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.3943661971830985, |
|
"grad_norm": 0.34200180240853983, |
|
"learning_rate": 1.1921195447584388e-06, |
|
"loss": 0.4549, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.397183098591549, |
|
"grad_norm": 0.3449841650449462, |
|
"learning_rate": 1.1815137900592488e-06, |
|
"loss": 0.4514, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.32359310287845633, |
|
"learning_rate": 1.1709490992005774e-06, |
|
"loss": 0.4587, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.4028169014084506, |
|
"grad_norm": 0.362853451889369, |
|
"learning_rate": 1.1604255857946667e-06, |
|
"loss": 0.4528, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.4056338028169013, |
|
"grad_norm": 0.3824838433363387, |
|
"learning_rate": 1.149943363010938e-06, |
|
"loss": 0.4692, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.408450704225352, |
|
"grad_norm": 0.309820114431787, |
|
"learning_rate": 1.1395025435747759e-06, |
|
"loss": 0.4605, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.4112676056338027, |
|
"grad_norm": 0.367780334595774, |
|
"learning_rate": 1.1291032397663142e-06, |
|
"loss": 0.4378, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.4140845070422534, |
|
"grad_norm": 0.31307329625243296, |
|
"learning_rate": 1.1187455634192307e-06, |
|
"loss": 0.453, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.416901408450704, |
|
"grad_norm": 0.3432753242652767, |
|
"learning_rate": 1.108429625919541e-06, |
|
"loss": 0.464, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.419718309859155, |
|
"grad_norm": 0.35797494481992975, |
|
"learning_rate": 1.098155538204404e-06, |
|
"loss": 0.4343, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.4225352112676055, |
|
"grad_norm": 0.35317578270799466, |
|
"learning_rate": 1.08792341076093e-06, |
|
"loss": 0.4677, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.425352112676056, |
|
"grad_norm": 0.36299248843589665, |
|
"learning_rate": 1.0777333536249873e-06, |
|
"loss": 0.4479, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.428169014084507, |
|
"grad_norm": 0.35957601988988697, |
|
"learning_rate": 1.067585476380023e-06, |
|
"loss": 0.4423, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.4309859154929576, |
|
"grad_norm": 0.3379973267546356, |
|
"learning_rate": 1.0574798881558834e-06, |
|
"loss": 0.4411, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.4338028169014083, |
|
"grad_norm": 0.3471607834308851, |
|
"learning_rate": 1.0474166976276396e-06, |
|
"loss": 0.4478, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.436619718309859, |
|
"grad_norm": 0.35472546641648245, |
|
"learning_rate": 1.0373960130144206e-06, |
|
"loss": 0.4522, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.4394366197183097, |
|
"grad_norm": 0.3587010132644766, |
|
"learning_rate": 1.0274179420782487e-06, |
|
"loss": 0.4792, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.4422535211267604, |
|
"grad_norm": 0.3462299368390657, |
|
"learning_rate": 1.0174825921228803e-06, |
|
"loss": 0.4453, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.445070422535211, |
|
"grad_norm": 0.32780133736017886, |
|
"learning_rate": 1.0075900699926523e-06, |
|
"loss": 0.4421, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.447887323943662, |
|
"grad_norm": 0.32980232448683194, |
|
"learning_rate": 9.977404820713315e-07, |
|
"loss": 0.4354, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.4507042253521125, |
|
"grad_norm": 0.35953716883112774, |
|
"learning_rate": 9.879339342809741e-07, |
|
"loss": 0.4674, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.453521126760563, |
|
"grad_norm": 0.3149172395510989, |
|
"learning_rate": 9.781705320807833e-07, |
|
"loss": 0.448, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.456338028169014, |
|
"grad_norm": 0.3599954533086056, |
|
"learning_rate": 9.684503804659773e-07, |
|
"loss": 0.4487, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.4591549295774646, |
|
"grad_norm": 0.3310011588753474, |
|
"learning_rate": 9.587735839666573e-07, |
|
"loss": 0.431, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.4619718309859153, |
|
"grad_norm": 0.3367067646735182, |
|
"learning_rate": 9.491402466466893e-07, |
|
"loss": 0.4454, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.464788732394366, |
|
"grad_norm": 0.3403390583206632, |
|
"learning_rate": 9.395504721025773e-07, |
|
"loss": 0.4463, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.4676056338028167, |
|
"grad_norm": 0.34010604525094434, |
|
"learning_rate": 9.300043634623546e-07, |
|
"loss": 0.4462, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.4704225352112674, |
|
"grad_norm": 0.34940346894141644, |
|
"learning_rate": 9.205020233844736e-07, |
|
"loss": 0.4389, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.473239436619718, |
|
"grad_norm": 0.33818530430000493, |
|
"learning_rate": 9.110435540567003e-07, |
|
"loss": 0.4528, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.476056338028169, |
|
"grad_norm": 0.32513521696510156, |
|
"learning_rate": 9.016290571950171e-07, |
|
"loss": 0.4475, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.4788732394366195, |
|
"grad_norm": 0.348467054421719, |
|
"learning_rate": 8.922586340425288e-07, |
|
"loss": 0.4434, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.4816901408450702, |
|
"grad_norm": 0.34598411047159433, |
|
"learning_rate": 8.829323853683719e-07, |
|
"loss": 0.4764, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.4845070422535214, |
|
"grad_norm": 0.33736914824699643, |
|
"learning_rate": 8.736504114666345e-07, |
|
"loss": 0.4477, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.4873239436619716, |
|
"grad_norm": 0.34008166784157434, |
|
"learning_rate": 8.644128121552742e-07, |
|
"loss": 0.4529, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.4901408450704228, |
|
"grad_norm": 0.31419004202982526, |
|
"learning_rate": 8.552196867750462e-07, |
|
"loss": 0.4575, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.492957746478873, |
|
"grad_norm": 0.38346622415801657, |
|
"learning_rate": 8.460711341884353e-07, |
|
"loss": 0.461, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.495774647887324, |
|
"grad_norm": 0.36160333663713207, |
|
"learning_rate": 8.36967252778591e-07, |
|
"loss": 0.4704, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.4985915492957744, |
|
"grad_norm": 0.3426582254986613, |
|
"learning_rate": 8.279081404482748e-07, |
|
"loss": 0.449, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.5014084507042256, |
|
"grad_norm": 0.3362056509073023, |
|
"learning_rate": 8.188938946187991e-07, |
|
"loss": 0.4551, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.504225352112676, |
|
"grad_norm": 0.35776312404702704, |
|
"learning_rate": 8.099246122289861e-07, |
|
"loss": 0.4616, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.507042253521127, |
|
"grad_norm": 0.3516358075048254, |
|
"learning_rate": 8.010003897341212e-07, |
|
"loss": 0.4354, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.5098591549295772, |
|
"grad_norm": 0.37697790136846654, |
|
"learning_rate": 7.921213231049196e-07, |
|
"loss": 0.4655, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.5126760563380284, |
|
"grad_norm": 0.338057431043845, |
|
"learning_rate": 7.832875078264912e-07, |
|
"loss": 0.4607, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.5154929577464786, |
|
"grad_norm": 0.34095350497165633, |
|
"learning_rate": 7.74499038897315e-07, |
|
"loss": 0.4537, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.5183098591549298, |
|
"grad_norm": 0.3626292402362458, |
|
"learning_rate": 7.65756010828217e-07, |
|
"loss": 0.4437, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.52112676056338, |
|
"grad_norm": 0.33536228545310875, |
|
"learning_rate": 7.570585176413547e-07, |
|
"loss": 0.4506, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.523943661971831, |
|
"grad_norm": 0.34757819690062497, |
|
"learning_rate": 7.484066528692041e-07, |
|
"loss": 0.4533, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.5267605633802814, |
|
"grad_norm": 0.3544426851266078, |
|
"learning_rate": 7.398005095535565e-07, |
|
"loss": 0.455, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.5295774647887326, |
|
"grad_norm": 0.35854954315916865, |
|
"learning_rate": 7.312401802445169e-07, |
|
"loss": 0.4554, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.532394366197183, |
|
"grad_norm": 0.34802065964448864, |
|
"learning_rate": 7.227257569995061e-07, |
|
"loss": 0.4418, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.535211267605634, |
|
"grad_norm": 0.33929091253717597, |
|
"learning_rate": 7.142573313822754e-07, |
|
"loss": 0.4546, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.5380281690140842, |
|
"grad_norm": 0.3455549381014243, |
|
"learning_rate": 7.058349944619186e-07, |
|
"loss": 0.4551, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.5408450704225354, |
|
"grad_norm": 0.3730269906106179, |
|
"learning_rate": 6.974588368118934e-07, |
|
"loss": 0.4418, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.543661971830986, |
|
"grad_norm": 0.3586546013910396, |
|
"learning_rate": 6.89128948509048e-07, |
|
"loss": 0.4409, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.546478873239437, |
|
"grad_norm": 0.3213019984256461, |
|
"learning_rate": 6.808454191326519e-07, |
|
"loss": 0.4395, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.5492957746478875, |
|
"grad_norm": 0.3260075624126433, |
|
"learning_rate": 6.726083377634318e-07, |
|
"loss": 0.4524, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.552112676056338, |
|
"grad_norm": 0.33357432795345565, |
|
"learning_rate": 6.644177929826162e-07, |
|
"loss": 0.464, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.554929577464789, |
|
"grad_norm": 0.36335471129130653, |
|
"learning_rate": 6.562738728709795e-07, |
|
"loss": 0.4508, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.5577464788732396, |
|
"grad_norm": 0.3648297045285258, |
|
"learning_rate": 6.481766650078969e-07, |
|
"loss": 0.4448, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.5605633802816903, |
|
"grad_norm": 0.3428914158054443, |
|
"learning_rate": 6.401262564704019e-07, |
|
"loss": 0.456, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.563380281690141, |
|
"grad_norm": 0.33333366790531865, |
|
"learning_rate": 6.321227338322511e-07, |
|
"loss": 0.4627, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.5661971830985917, |
|
"grad_norm": 0.34912392956937827, |
|
"learning_rate": 6.241661831629902e-07, |
|
"loss": 0.4632, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.5690140845070424, |
|
"grad_norm": 0.34822142796084354, |
|
"learning_rate": 6.162566900270311e-07, |
|
"loss": 0.462, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.571830985915493, |
|
"grad_norm": 0.31962377759459676, |
|
"learning_rate": 6.083943394827329e-07, |
|
"loss": 0.4153, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.574647887323944, |
|
"grad_norm": 0.33869239576486176, |
|
"learning_rate": 6.005792160814821e-07, |
|
"loss": 0.4401, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.5774647887323945, |
|
"grad_norm": 0.33793034444750547, |
|
"learning_rate": 5.928114038667888e-07, |
|
"loss": 0.4726, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.580281690140845, |
|
"grad_norm": 0.3237369136857151, |
|
"learning_rate": 5.850909863733784e-07, |
|
"loss": 0.438, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.583098591549296, |
|
"grad_norm": 0.3375720011474295, |
|
"learning_rate": 5.774180466262985e-07, |
|
"loss": 0.4344, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.5859154929577466, |
|
"grad_norm": 0.3580068599295479, |
|
"learning_rate": 5.697926671400194e-07, |
|
"loss": 0.4512, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.5887323943661973, |
|
"grad_norm": 0.34625637202820847, |
|
"learning_rate": 5.622149299175539e-07, |
|
"loss": 0.4609, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.591549295774648, |
|
"grad_norm": 0.34840785097504035, |
|
"learning_rate": 5.546849164495688e-07, |
|
"loss": 0.4423, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.5943661971830987, |
|
"grad_norm": 0.3325264584554415, |
|
"learning_rate": 5.472027077135145e-07, |
|
"loss": 0.4358, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.5971830985915494, |
|
"grad_norm": 0.3412839911435819, |
|
"learning_rate": 5.397683841727485e-07, |
|
"loss": 0.4217, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.31438716871686184, |
|
"learning_rate": 5.323820257756745e-07, |
|
"loss": 0.4428, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.602816901408451, |
|
"grad_norm": 0.335053022759397, |
|
"learning_rate": 5.250437119548817e-07, |
|
"loss": 0.4472, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.6056338028169015, |
|
"grad_norm": 0.3386301442199217, |
|
"learning_rate": 5.177535216262885e-07, |
|
"loss": 0.4469, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.608450704225352, |
|
"grad_norm": 0.3402630657380998, |
|
"learning_rate": 5.105115331882954e-07, |
|
"loss": 0.4717, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.611267605633803, |
|
"grad_norm": 0.33515652533891976, |
|
"learning_rate": 5.033178245209436e-07, |
|
"loss": 0.4606, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.6140845070422536, |
|
"grad_norm": 0.37526213127846714, |
|
"learning_rate": 4.961724729850731e-07, |
|
"loss": 0.4673, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.6169014084507043, |
|
"grad_norm": 0.3297554414539737, |
|
"learning_rate": 4.890755554214954e-07, |
|
"loss": 0.4298, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.619718309859155, |
|
"grad_norm": 0.3757308737779317, |
|
"learning_rate": 4.820271481501642e-07, |
|
"loss": 0.4469, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.6225352112676057, |
|
"grad_norm": 0.3560626177653075, |
|
"learning_rate": 4.7502732696935507e-07, |
|
"loss": 0.4478, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.6253521126760564, |
|
"grad_norm": 0.3566774647442687, |
|
"learning_rate": 4.680761671548517e-07, |
|
"loss": 0.4594, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.628169014084507, |
|
"grad_norm": 0.32636544667152256, |
|
"learning_rate": 4.6117374345913454e-07, |
|
"loss": 0.4491, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.630985915492958, |
|
"grad_norm": 0.3297355569759064, |
|
"learning_rate": 4.5432013011057984e-07, |
|
"loss": 0.4562, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.6338028169014085, |
|
"grad_norm": 0.33291951317085183, |
|
"learning_rate": 4.4751540081265645e-07, |
|
"loss": 0.4713, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.636619718309859, |
|
"grad_norm": 0.3364588123495725, |
|
"learning_rate": 4.407596287431387e-07, |
|
"loss": 0.4701, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.63943661971831, |
|
"grad_norm": 0.3569801867527304, |
|
"learning_rate": 4.340528865533161e-07, |
|
"loss": 0.4602, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.6422535211267606, |
|
"grad_norm": 0.3457740049414934, |
|
"learning_rate": 4.2739524636721207e-07, |
|
"loss": 0.4523, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.6450704225352113, |
|
"grad_norm": 0.3616089928125598, |
|
"learning_rate": 4.207867797808102e-07, |
|
"loss": 0.4178, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.647887323943662, |
|
"grad_norm": 0.3153735514351465, |
|
"learning_rate": 4.1422755786128364e-07, |
|
"loss": 0.4484, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.6507042253521127, |
|
"grad_norm": 0.32313727348187915, |
|
"learning_rate": 4.0771765114622886e-07, |
|
"loss": 0.4544, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.6535211267605634, |
|
"grad_norm": 0.32805844915240456, |
|
"learning_rate": 4.012571296429102e-07, |
|
"loss": 0.4383, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.656338028169014, |
|
"grad_norm": 0.31287096142273935, |
|
"learning_rate": 3.948460628275047e-07, |
|
"loss": 0.4502, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.659154929577465, |
|
"grad_norm": 0.3371841187040242, |
|
"learning_rate": 3.8848451964435594e-07, |
|
"loss": 0.4557, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.6619718309859155, |
|
"grad_norm": 0.32003737481875727, |
|
"learning_rate": 3.8217256850523243e-07, |
|
"loss": 0.4348, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.664788732394366, |
|
"grad_norm": 0.33975544248849016, |
|
"learning_rate": 3.759102772885925e-07, |
|
"loss": 0.4421, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.667605633802817, |
|
"grad_norm": 0.3266362705011225, |
|
"learning_rate": 3.696977133388524e-07, |
|
"loss": 0.4501, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.6704225352112676, |
|
"grad_norm": 0.32481434731258335, |
|
"learning_rate": 3.635349434656638e-07, |
|
"loss": 0.459, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.6732394366197183, |
|
"grad_norm": 0.34436172325722814, |
|
"learning_rate": 3.5742203394319606e-07, |
|
"loss": 0.4543, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.676056338028169, |
|
"grad_norm": 0.35110488824156655, |
|
"learning_rate": 3.513590505094222e-07, |
|
"loss": 0.4497, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.6788732394366197, |
|
"grad_norm": 0.3315333572014592, |
|
"learning_rate": 3.453460583654106e-07, |
|
"loss": 0.4306, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.6816901408450704, |
|
"grad_norm": 0.34897677030004204, |
|
"learning_rate": 3.3938312217462686e-07, |
|
"loss": 0.4403, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.684507042253521, |
|
"grad_norm": 0.3438403164590787, |
|
"learning_rate": 3.334703060622374e-07, |
|
"loss": 0.4564, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.687323943661972, |
|
"grad_norm": 0.3207018561351413, |
|
"learning_rate": 3.2760767361441847e-07, |
|
"loss": 0.475, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.6901408450704225, |
|
"grad_norm": 0.3383111012033015, |
|
"learning_rate": 3.2179528787767313e-07, |
|
"loss": 0.452, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.692957746478873, |
|
"grad_norm": 0.3341858993813047, |
|
"learning_rate": 3.1603321135815434e-07, |
|
"loss": 0.4584, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.695774647887324, |
|
"grad_norm": 0.3257817149894772, |
|
"learning_rate": 3.103215060209902e-07, |
|
"loss": 0.4378, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.6985915492957746, |
|
"grad_norm": 0.32341143095549446, |
|
"learning_rate": 3.046602332896209e-07, |
|
"loss": 0.4533, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.7014084507042253, |
|
"grad_norm": 0.33437110577140444, |
|
"learning_rate": 2.9904945404513606e-07, |
|
"loss": 0.4423, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.704225352112676, |
|
"grad_norm": 0.3280497699271315, |
|
"learning_rate": 2.9348922862562025e-07, |
|
"loss": 0.4344, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.7070422535211267, |
|
"grad_norm": 0.3173486025834307, |
|
"learning_rate": 2.879796168255039e-07, |
|
"loss": 0.443, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.7098591549295774, |
|
"grad_norm": 0.3249778481256936, |
|
"learning_rate": 2.825206778949219e-07, |
|
"loss": 0.4638, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.712676056338028, |
|
"grad_norm": 0.3206975925043833, |
|
"learning_rate": 2.771124705390743e-07, |
|
"loss": 0.4371, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.715492957746479, |
|
"grad_norm": 0.3214786347868462, |
|
"learning_rate": 2.717550529175955e-07, |
|
"loss": 0.4563, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.7183098591549295, |
|
"grad_norm": 0.3436233501105619, |
|
"learning_rate": 2.664484826439312e-07, |
|
"loss": 0.4552, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.72112676056338, |
|
"grad_norm": 0.3474231981101561, |
|
"learning_rate": 2.611928167847133e-07, |
|
"loss": 0.46, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.723943661971831, |
|
"grad_norm": 0.34528907019271443, |
|
"learning_rate": 2.5598811185915397e-07, |
|
"loss": 0.4652, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.7267605633802816, |
|
"grad_norm": 0.31957962452572586, |
|
"learning_rate": 2.5083442383843136e-07, |
|
"loss": 0.4302, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.7295774647887323, |
|
"grad_norm": 0.33868297083611953, |
|
"learning_rate": 2.457318081450899e-07, |
|
"loss": 0.4635, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.732394366197183, |
|
"grad_norm": 0.3650237660288557, |
|
"learning_rate": 2.4068031965244486e-07, |
|
"loss": 0.4438, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.7352112676056337, |
|
"grad_norm": 0.3422038198247917, |
|
"learning_rate": 2.3568001268399088e-07, |
|
"loss": 0.4496, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.7380281690140844, |
|
"grad_norm": 0.3161758059021394, |
|
"learning_rate": 2.3073094101282056e-07, |
|
"loss": 0.4496, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.740845070422535, |
|
"grad_norm": 0.3252943511474426, |
|
"learning_rate": 2.258331578610423e-07, |
|
"loss": 0.4431, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.743661971830986, |
|
"grad_norm": 0.35599450055875387, |
|
"learning_rate": 2.2098671589921017e-07, |
|
"loss": 0.4461, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.7464788732394365, |
|
"grad_norm": 0.3441924744953093, |
|
"learning_rate": 2.1619166724575924e-07, |
|
"loss": 0.4509, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.749295774647887, |
|
"grad_norm": 0.3488688599968552, |
|
"learning_rate": 2.114480634664401e-07, |
|
"loss": 0.4507, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.752112676056338, |
|
"grad_norm": 0.355239783276596, |
|
"learning_rate": 2.0675595557376916e-07, |
|
"loss": 0.4718, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.7549295774647886, |
|
"grad_norm": 0.3437164703895729, |
|
"learning_rate": 2.0211539402647807e-07, |
|
"loss": 0.4654, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.7577464788732393, |
|
"grad_norm": 0.31614760184812657, |
|
"learning_rate": 1.9752642872897078e-07, |
|
"loss": 0.4676, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.76056338028169, |
|
"grad_norm": 0.321515520308783, |
|
"learning_rate": 1.9298910903078838e-07, |
|
"loss": 0.4305, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.7633802816901407, |
|
"grad_norm": 0.34992635800783795, |
|
"learning_rate": 1.8850348372607575e-07, |
|
"loss": 0.4538, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.7661971830985914, |
|
"grad_norm": 0.30276880004114215, |
|
"learning_rate": 1.840696010530596e-07, |
|
"loss": 0.4567, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.769014084507042, |
|
"grad_norm": 0.34441547278059703, |
|
"learning_rate": 1.7968750869352792e-07, |
|
"loss": 0.4453, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.771830985915493, |
|
"grad_norm": 0.30998669583959265, |
|
"learning_rate": 1.753572537723186e-07, |
|
"loss": 0.4425, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.7746478873239435, |
|
"grad_norm": 0.3586629209274004, |
|
"learning_rate": 1.7107888285681106e-07, |
|
"loss": 0.4564, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.777464788732394, |
|
"grad_norm": 0.32695128234004484, |
|
"learning_rate": 1.6685244195642715e-07, |
|
"loss": 0.4637, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.780281690140845, |
|
"grad_norm": 0.34115355713855117, |
|
"learning_rate": 1.6267797652213368e-07, |
|
"loss": 0.4477, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.7830985915492956, |
|
"grad_norm": 0.2930530495084058, |
|
"learning_rate": 1.5855553144595858e-07, |
|
"loss": 0.4566, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.7859154929577463, |
|
"grad_norm": 0.33022571130555106, |
|
"learning_rate": 1.5448515106050165e-07, |
|
"loss": 0.4308, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.788732394366197, |
|
"grad_norm": 0.352771290999933, |
|
"learning_rate": 1.5046687913846392e-07, |
|
"loss": 0.4705, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.7915492957746477, |
|
"grad_norm": 0.32707483621349326, |
|
"learning_rate": 1.4650075889217297e-07, |
|
"loss": 0.4602, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.7943661971830984, |
|
"grad_norm": 0.32654783950949157, |
|
"learning_rate": 1.4258683297311891e-07, |
|
"loss": 0.4479, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.7971830985915496, |
|
"grad_norm": 0.35198467602408556, |
|
"learning_rate": 1.3872514347149756e-07, |
|
"loss": 0.4448, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.342116234775216, |
|
"learning_rate": 1.3491573191575513e-07, |
|
"loss": 0.4582, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.802816901408451, |
|
"grad_norm": 0.3507902235732393, |
|
"learning_rate": 1.3115863927214423e-07, |
|
"loss": 0.4546, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.8056338028169012, |
|
"grad_norm": 0.3238046742423758, |
|
"learning_rate": 1.2745390594428143e-07, |
|
"loss": 0.4516, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.8084507042253524, |
|
"grad_norm": 0.32569158910871976, |
|
"learning_rate": 1.2380157177271369e-07, |
|
"loss": 0.4531, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.8112676056338026, |
|
"grad_norm": 0.3072177170540146, |
|
"learning_rate": 1.202016760344893e-07, |
|
"loss": 0.4297, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.8140845070422538, |
|
"grad_norm": 0.3158277495077995, |
|
"learning_rate": 1.166542574427354e-07, |
|
"loss": 0.4397, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.816901408450704, |
|
"grad_norm": 0.35987034251993116, |
|
"learning_rate": 1.131593541462439e-07, |
|
"loss": 0.4656, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.819718309859155, |
|
"grad_norm": 0.33708080153812514, |
|
"learning_rate": 1.0971700372905736e-07, |
|
"loss": 0.4521, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.8225352112676054, |
|
"grad_norm": 0.3389935828063123, |
|
"learning_rate": 1.0632724321006816e-07, |
|
"loss": 0.4576, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.8253521126760566, |
|
"grad_norm": 0.32671457088330047, |
|
"learning_rate": 1.0299010904261886e-07, |
|
"loss": 0.4413, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.828169014084507, |
|
"grad_norm": 0.3447563387897561, |
|
"learning_rate": 9.970563711410974e-08, |
|
"loss": 0.4534, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.830985915492958, |
|
"grad_norm": 0.3701007432833937, |
|
"learning_rate": 9.647386274561466e-08, |
|
"loss": 0.465, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.8338028169014082, |
|
"grad_norm": 0.31675165370176617, |
|
"learning_rate": 9.329482069149853e-08, |
|
"loss": 0.454, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.8366197183098594, |
|
"grad_norm": 0.3449482405049473, |
|
"learning_rate": 9.016854513904828e-08, |
|
"loss": 0.4599, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.8394366197183096, |
|
"grad_norm": 0.31943993823528516, |
|
"learning_rate": 8.709506970809855e-08, |
|
"loss": 0.4683, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.8422535211267608, |
|
"grad_norm": 0.30682247155872994, |
|
"learning_rate": 8.407442745067552e-08, |
|
"loss": 0.4634, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.845070422535211, |
|
"grad_norm": 0.3068025583439518, |
|
"learning_rate": 8.110665085063918e-08, |
|
"loss": 0.4362, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.847887323943662, |
|
"grad_norm": 0.3326409740995297, |
|
"learning_rate": 7.819177182333493e-08, |
|
"loss": 0.4637, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.8507042253521124, |
|
"grad_norm": 0.37249201472232357, |
|
"learning_rate": 7.532982171524927e-08, |
|
"loss": 0.4492, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.8535211267605636, |
|
"grad_norm": 0.33303906691187746, |
|
"learning_rate": 7.252083130367349e-08, |
|
"loss": 0.4478, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.856338028169014, |
|
"grad_norm": 0.32196502935195925, |
|
"learning_rate": 6.97648307963733e-08, |
|
"loss": 0.4601, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.859154929577465, |
|
"grad_norm": 0.34642386779898743, |
|
"learning_rate": 6.706184983126196e-08, |
|
"loss": 0.43, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.8619718309859152, |
|
"grad_norm": 0.34765468257149207, |
|
"learning_rate": 6.441191747608322e-08, |
|
"loss": 0.4512, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.8647887323943664, |
|
"grad_norm": 0.34037074374729687, |
|
"learning_rate": 6.181506222809885e-08, |
|
"loss": 0.4439, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.8676056338028166, |
|
"grad_norm": 0.3169812664050294, |
|
"learning_rate": 5.9271312013781665e-08, |
|
"loss": 0.443, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.870422535211268, |
|
"grad_norm": 0.32211817240340407, |
|
"learning_rate": 5.678069418851351e-08, |
|
"loss": 0.4676, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.873239436619718, |
|
"grad_norm": 0.3359515873734084, |
|
"learning_rate": 5.4343235536294944e-08, |
|
"loss": 0.4516, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.876056338028169, |
|
"grad_norm": 0.34578515983344105, |
|
"learning_rate": 5.195896226945385e-08, |
|
"loss": 0.4407, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.87887323943662, |
|
"grad_norm": 0.31439962540381255, |
|
"learning_rate": 4.9627900028365036e-08, |
|
"loss": 0.4435, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.8816901408450706, |
|
"grad_norm": 0.34336441391655215, |
|
"learning_rate": 4.735007388117441e-08, |
|
"loss": 0.4386, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.8845070422535213, |
|
"grad_norm": 0.335578371195018, |
|
"learning_rate": 4.5125508323528025e-08, |
|
"loss": 0.4289, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.887323943661972, |
|
"grad_norm": 0.350643103957519, |
|
"learning_rate": 4.2954227278310666e-08, |
|
"loss": 0.4464, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.8901408450704227, |
|
"grad_norm": 0.35068154909138555, |
|
"learning_rate": 4.083625409538772e-08, |
|
"loss": 0.4326, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.8929577464788734, |
|
"grad_norm": 0.34185081917190546, |
|
"learning_rate": 3.877161155135423e-08, |
|
"loss": 0.4593, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.895774647887324, |
|
"grad_norm": 0.3300724585491567, |
|
"learning_rate": 3.676032184928957e-08, |
|
"loss": 0.457, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.898591549295775, |
|
"grad_norm": 0.3411195609156822, |
|
"learning_rate": 3.4802406618518195e-08, |
|
"loss": 0.456, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.9014084507042255, |
|
"grad_norm": 0.3637968816563219, |
|
"learning_rate": 3.289788691437923e-08, |
|
"loss": 0.4394, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.904225352112676, |
|
"grad_norm": 0.3395290765317001, |
|
"learning_rate": 3.10467832179967e-08, |
|
"loss": 0.4506, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.907042253521127, |
|
"grad_norm": 0.3260136250051306, |
|
"learning_rate": 2.9249115436063013e-08, |
|
"loss": 0.4472, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.9098591549295776, |
|
"grad_norm": 0.33498085364365865, |
|
"learning_rate": 2.7504902900621356e-08, |
|
"loss": 0.4434, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.9126760563380283, |
|
"grad_norm": 0.34312265751451554, |
|
"learning_rate": 2.581416436886086e-08, |
|
"loss": 0.4578, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.915492957746479, |
|
"grad_norm": 0.3210591639802571, |
|
"learning_rate": 2.4176918022912333e-08, |
|
"loss": 0.4608, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.9183098591549297, |
|
"grad_norm": 0.3553095286591404, |
|
"learning_rate": 2.2593181469653945e-08, |
|
"loss": 0.4673, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.9211267605633804, |
|
"grad_norm": 0.321850104936369, |
|
"learning_rate": 2.1062971740523076e-08, |
|
"loss": 0.4534, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.923943661971831, |
|
"grad_norm": 0.3229367069170625, |
|
"learning_rate": 1.958630529133032e-08, |
|
"loss": 0.4503, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.926760563380282, |
|
"grad_norm": 0.32793169190757315, |
|
"learning_rate": 1.816319800208466e-08, |
|
"loss": 0.4519, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.9295774647887325, |
|
"grad_norm": 0.3306690320189479, |
|
"learning_rate": 1.679366517682246e-08, |
|
"loss": 0.4484, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.932394366197183, |
|
"grad_norm": 0.35486018214343157, |
|
"learning_rate": 1.547772154344207e-08, |
|
"loss": 0.4564, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.935211267605634, |
|
"grad_norm": 0.3487414018054857, |
|
"learning_rate": 1.4215381253546712e-08, |
|
"loss": 0.4411, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.9380281690140846, |
|
"grad_norm": 0.33851454546424364, |
|
"learning_rate": 1.3006657882290163e-08, |
|
"loss": 0.456, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.9408450704225353, |
|
"grad_norm": 0.33398998229974186, |
|
"learning_rate": 1.1851564428232987e-08, |
|
"loss": 0.4563, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.943661971830986, |
|
"grad_norm": 0.3405604730717625, |
|
"learning_rate": 1.0750113313202082e-08, |
|
"loss": 0.4203, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.9464788732394367, |
|
"grad_norm": 0.3200139617949195, |
|
"learning_rate": 9.70231638215635e-09, |
|
"loss": 0.4472, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.9492957746478874, |
|
"grad_norm": 0.3410890631754851, |
|
"learning_rate": 8.708184903060135e-09, |
|
"loss": 0.4403, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.952112676056338, |
|
"grad_norm": 0.3419082068730841, |
|
"learning_rate": 7.76772956676164e-09, |
|
"loss": 0.4367, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.954929577464789, |
|
"grad_norm": 0.3378106075319102, |
|
"learning_rate": 6.880960486877475e-09, |
|
"loss": 0.4531, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.9577464788732395, |
|
"grad_norm": 0.3278897439419402, |
|
"learning_rate": 6.047887199686075e-09, |
|
"loss": 0.459, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.96056338028169, |
|
"grad_norm": 0.3343112858188528, |
|
"learning_rate": 5.268518664022226e-09, |
|
"loss": 0.459, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.963380281690141, |
|
"grad_norm": 0.32255215872689313, |
|
"learning_rate": 4.54286326118214e-09, |
|
"loss": 0.4336, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.9661971830985916, |
|
"grad_norm": 0.3370218781417737, |
|
"learning_rate": 3.870928794834083e-09, |
|
"loss": 0.4367, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.9690140845070423, |
|
"grad_norm": 0.3323793895278448, |
|
"learning_rate": 3.2527224909328914e-09, |
|
"loss": 0.4625, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.971830985915493, |
|
"grad_norm": 0.33449875147589203, |
|
"learning_rate": 2.6882509976433604e-09, |
|
"loss": 0.4539, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.9746478873239437, |
|
"grad_norm": 0.37530967430639717, |
|
"learning_rate": 2.1775203852680836e-09, |
|
"loss": 0.447, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.9774647887323944, |
|
"grad_norm": 0.32946903451444376, |
|
"learning_rate": 1.7205361461825054e-09, |
|
"loss": 0.443, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.980281690140845, |
|
"grad_norm": 0.3181193426288814, |
|
"learning_rate": 1.317303194776076e-09, |
|
"loss": 0.4281, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.983098591549296, |
|
"grad_norm": 0.34593232841366583, |
|
"learning_rate": 9.678258673995189e-10, |
|
"loss": 0.456, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.9859154929577465, |
|
"grad_norm": 0.3417744937936135, |
|
"learning_rate": 6.721079223165339e-10, |
|
"loss": 0.4374, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.988732394366197, |
|
"grad_norm": 0.3153613761930667, |
|
"learning_rate": 4.301525396666062e-10, |
|
"loss": 0.4499, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.991549295774648, |
|
"grad_norm": 0.31244029690230596, |
|
"learning_rate": 2.4196232142614794e-10, |
|
"loss": 0.4421, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.9943661971830986, |
|
"grad_norm": 0.3084976115932713, |
|
"learning_rate": 1.0753929138629382e-10, |
|
"loss": 0.4275, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.9971830985915493, |
|
"grad_norm": 0.3735924098512123, |
|
"learning_rate": 2.6884895126255695e-11, |
|
"loss": 0.4404, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.33696360890684507, |
|
"learning_rate": 0.0, |
|
"loss": 0.4535, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1065, |
|
"total_flos": 1313980844015616.0, |
|
"train_loss": 0.5105482672861484, |
|
"train_runtime": 17519.9119, |
|
"train_samples_per_second": 5.836, |
|
"train_steps_per_second": 0.061 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1065, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1313980844015616.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|