{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1065,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0028169014084507044,
      "grad_norm": 5.39381152989452,
      "learning_rate": 9.345794392523364e-08,
      "loss": 0.9214,
      "step": 1
    },
    {
      "epoch": 0.005633802816901409,
      "grad_norm": 5.710911651711583,
      "learning_rate": 1.8691588785046729e-07,
      "loss": 0.8778,
      "step": 2
    },
    {
      "epoch": 0.008450704225352112,
      "grad_norm": 4.807270772834428,
      "learning_rate": 2.8037383177570096e-07,
      "loss": 0.8196,
      "step": 3
    },
    {
      "epoch": 0.011267605633802818,
      "grad_norm": 5.480036707909385,
      "learning_rate": 3.7383177570093457e-07,
      "loss": 0.8791,
      "step": 4
    },
    {
      "epoch": 0.014084507042253521,
      "grad_norm": 5.318989970133928,
      "learning_rate": 4.6728971962616824e-07,
      "loss": 0.8627,
      "step": 5
    },
    {
      "epoch": 0.016901408450704224,
      "grad_norm": 4.809114137429962,
      "learning_rate": 5.607476635514019e-07,
      "loss": 0.8093,
      "step": 6
    },
    {
      "epoch": 0.01971830985915493,
      "grad_norm": 5.50714519392,
      "learning_rate": 6.542056074766355e-07,
      "loss": 0.8247,
      "step": 7
    },
    {
      "epoch": 0.022535211267605635,
      "grad_norm": 4.502971860173534,
      "learning_rate": 7.476635514018691e-07,
      "loss": 0.8387,
      "step": 8
    },
    {
      "epoch": 0.02535211267605634,
      "grad_norm": 4.575448924350796,
      "learning_rate": 8.411214953271029e-07,
      "loss": 0.8583,
      "step": 9
    },
    {
      "epoch": 0.028169014084507043,
      "grad_norm": 4.883031846315503,
      "learning_rate": 9.345794392523365e-07,
      "loss": 0.8986,
      "step": 10
    },
    {
      "epoch": 0.030985915492957747,
      "grad_norm": 3.558388444517262,
      "learning_rate": 1.0280373831775702e-06,
      "loss": 0.7943,
      "step": 11
    },
    {
      "epoch": 0.03380281690140845,
      "grad_norm": 3.3653902333255328,
      "learning_rate": 1.1214953271028038e-06,
      "loss": 0.703,
      "step": 12
    },
    {
      "epoch": 0.036619718309859155,
      "grad_norm": 3.5878521578631757,
      "learning_rate": 1.2149532710280374e-06,
      "loss": 0.8741,
      "step": 13
    },
    {
      "epoch": 0.03943661971830986,
      "grad_norm": 3.0669396475993276,
      "learning_rate": 1.308411214953271e-06,
      "loss": 0.7508,
      "step": 14
    },
    {
      "epoch": 0.04225352112676056,
      "grad_norm": 2.360776473843334,
      "learning_rate": 1.4018691588785047e-06,
      "loss": 0.7579,
      "step": 15
    },
    {
      "epoch": 0.04507042253521127,
      "grad_norm": 2.406994148358753,
      "learning_rate": 1.4953271028037383e-06,
      "loss": 0.7386,
      "step": 16
    },
    {
      "epoch": 0.04788732394366197,
      "grad_norm": 2.4289659946387627,
      "learning_rate": 1.588785046728972e-06,
      "loss": 0.8448,
      "step": 17
    },
    {
      "epoch": 0.05070422535211268,
      "grad_norm": 2.1113226098450655,
      "learning_rate": 1.6822429906542057e-06,
      "loss": 0.8075,
      "step": 18
    },
    {
      "epoch": 0.05352112676056338,
      "grad_norm": 2.0005363458129373,
      "learning_rate": 1.7757009345794394e-06,
      "loss": 0.8043,
      "step": 19
    },
    {
      "epoch": 0.056338028169014086,
      "grad_norm": 2.0984615538535456,
      "learning_rate": 1.869158878504673e-06,
      "loss": 0.7654,
      "step": 20
    },
    {
      "epoch": 0.059154929577464786,
      "grad_norm": 2.454463629584827,
      "learning_rate": 1.962616822429907e-06,
      "loss": 0.7389,
      "step": 21
    },
    {
      "epoch": 0.061971830985915494,
      "grad_norm": 2.5756627563324677,
      "learning_rate": 2.0560747663551404e-06,
      "loss": 0.7537,
      "step": 22
    },
    {
      "epoch": 0.0647887323943662,
      "grad_norm": 2.3450614128411487,
      "learning_rate": 2.149532710280374e-06,
      "loss": 0.7403,
      "step": 23
    },
    {
      "epoch": 0.0676056338028169,
      "grad_norm": 2.21793106800515,
      "learning_rate": 2.2429906542056077e-06,
      "loss": 0.7722,
      "step": 24
    },
    {
      "epoch": 0.07042253521126761,
      "grad_norm": 1.7549218172116292,
      "learning_rate": 2.3364485981308413e-06,
      "loss": 0.7552,
      "step": 25
    },
    {
      "epoch": 0.07323943661971831,
      "grad_norm": 1.4710972828597284,
      "learning_rate": 2.429906542056075e-06,
      "loss": 0.7718,
      "step": 26
    },
    {
      "epoch": 0.07605633802816901,
      "grad_norm": 1.3151046770941317,
      "learning_rate": 2.5233644859813085e-06,
      "loss": 0.7333,
      "step": 27
    },
    {
      "epoch": 0.07887323943661972,
      "grad_norm": 1.1938149562869083,
      "learning_rate": 2.616822429906542e-06,
      "loss": 0.7295,
      "step": 28
    },
    {
      "epoch": 0.08169014084507042,
      "grad_norm": 1.3259440496917112,
      "learning_rate": 2.7102803738317757e-06,
      "loss": 0.8544,
      "step": 29
    },
    {
      "epoch": 0.08450704225352113,
      "grad_norm": 1.249957124569568,
      "learning_rate": 2.8037383177570094e-06,
      "loss": 0.7156,
      "step": 30
    },
    {
      "epoch": 0.08732394366197183,
      "grad_norm": 1.184686946843432,
      "learning_rate": 2.897196261682243e-06,
      "loss": 0.7365,
      "step": 31
    },
    {
      "epoch": 0.09014084507042254,
      "grad_norm": 1.0223702189060315,
      "learning_rate": 2.9906542056074766e-06,
      "loss": 0.6805,
      "step": 32
    },
    {
      "epoch": 0.09295774647887324,
      "grad_norm": 1.1473857275242683,
      "learning_rate": 3.08411214953271e-06,
      "loss": 0.7861,
      "step": 33
    },
    {
      "epoch": 0.09577464788732394,
      "grad_norm": 1.030743194655851,
      "learning_rate": 3.177570093457944e-06,
      "loss": 0.7734,
      "step": 34
    },
    {
      "epoch": 0.09859154929577464,
      "grad_norm": 1.1418816841824204,
      "learning_rate": 3.2710280373831774e-06,
      "loss": 0.7042,
      "step": 35
    },
    {
      "epoch": 0.10140845070422536,
      "grad_norm": 0.8969695635147503,
      "learning_rate": 3.3644859813084115e-06,
      "loss": 0.6155,
      "step": 36
    },
    {
      "epoch": 0.10422535211267606,
      "grad_norm": 1.0522251076635003,
      "learning_rate": 3.457943925233645e-06,
      "loss": 0.7224,
      "step": 37
    },
    {
      "epoch": 0.10704225352112676,
      "grad_norm": 1.0245713785476949,
      "learning_rate": 3.5514018691588787e-06,
      "loss": 0.7261,
      "step": 38
    },
    {
      "epoch": 0.10985915492957747,
      "grad_norm": 0.9472567384114589,
      "learning_rate": 3.6448598130841123e-06,
      "loss": 0.6792,
      "step": 39
    },
    {
      "epoch": 0.11267605633802817,
      "grad_norm": 0.9445593106604601,
      "learning_rate": 3.738317757009346e-06,
      "loss": 0.6477,
      "step": 40
    },
    {
      "epoch": 0.11549295774647887,
      "grad_norm": 0.9279518039467434,
      "learning_rate": 3.8317757009345796e-06,
      "loss": 0.7259,
      "step": 41
    },
    {
      "epoch": 0.11830985915492957,
      "grad_norm": 0.9154015884052205,
      "learning_rate": 3.925233644859814e-06,
      "loss": 0.6307,
      "step": 42
    },
    {
      "epoch": 0.12112676056338029,
      "grad_norm": 0.9828247076921207,
      "learning_rate": 4.018691588785047e-06,
      "loss": 0.7188,
      "step": 43
    },
    {
      "epoch": 0.12394366197183099,
      "grad_norm": 0.9916044713420874,
      "learning_rate": 4.112149532710281e-06,
      "loss": 0.7279,
      "step": 44
    },
    {
      "epoch": 0.1267605633802817,
      "grad_norm": 0.9328317475978831,
      "learning_rate": 4.205607476635514e-06,
      "loss": 0.6689,
      "step": 45
    },
    {
      "epoch": 0.1295774647887324,
      "grad_norm": 0.8726360541682836,
      "learning_rate": 4.299065420560748e-06,
      "loss": 0.71,
      "step": 46
    },
    {
      "epoch": 0.1323943661971831,
      "grad_norm": 1.0372408545860783,
      "learning_rate": 4.392523364485981e-06,
      "loss": 0.7656,
      "step": 47
    },
    {
      "epoch": 0.1352112676056338,
      "grad_norm": 0.9201045709937832,
      "learning_rate": 4.485981308411215e-06,
      "loss": 0.7125,
      "step": 48
    },
    {
      "epoch": 0.13802816901408452,
      "grad_norm": 0.8985693053741854,
      "learning_rate": 4.579439252336449e-06,
      "loss": 0.6673,
      "step": 49
    },
    {
      "epoch": 0.14084507042253522,
      "grad_norm": 0.8016552690681369,
      "learning_rate": 4.6728971962616825e-06,
      "loss": 0.6028,
      "step": 50
    },
    {
      "epoch": 0.14366197183098592,
      "grad_norm": 0.9375805058634695,
      "learning_rate": 4.766355140186917e-06,
      "loss": 0.7053,
      "step": 51
    },
    {
      "epoch": 0.14647887323943662,
      "grad_norm": 0.8235300906984858,
      "learning_rate": 4.85981308411215e-06,
      "loss": 0.709,
      "step": 52
    },
    {
      "epoch": 0.14929577464788732,
      "grad_norm": 0.8885825883694105,
      "learning_rate": 4.953271028037384e-06,
      "loss": 0.7004,
      "step": 53
    },
    {
      "epoch": 0.15211267605633802,
      "grad_norm": 0.8993913153528437,
      "learning_rate": 5.046728971962617e-06,
      "loss": 0.6802,
      "step": 54
    },
    {
      "epoch": 0.15492957746478872,
      "grad_norm": 0.8690379834087235,
      "learning_rate": 5.14018691588785e-06,
      "loss": 0.7182,
      "step": 55
    },
    {
      "epoch": 0.15774647887323945,
      "grad_norm": 1.0168474666547533,
      "learning_rate": 5.233644859813084e-06,
      "loss": 0.7477,
      "step": 56
    },
    {
      "epoch": 0.16056338028169015,
      "grad_norm": 0.8719423698232311,
      "learning_rate": 5.3271028037383174e-06,
      "loss": 0.74,
      "step": 57
    },
    {
      "epoch": 0.16338028169014085,
      "grad_norm": 1.026890979433,
      "learning_rate": 5.4205607476635515e-06,
      "loss": 0.7706,
      "step": 58
    },
    {
      "epoch": 0.16619718309859155,
      "grad_norm": 0.8955556843274681,
      "learning_rate": 5.514018691588785e-06,
      "loss": 0.7149,
      "step": 59
    },
    {
      "epoch": 0.16901408450704225,
      "grad_norm": 0.8694856642141823,
      "learning_rate": 5.607476635514019e-06,
      "loss": 0.7079,
      "step": 60
    },
    {
      "epoch": 0.17183098591549295,
      "grad_norm": 0.9902510235542388,
      "learning_rate": 5.700934579439253e-06,
      "loss": 0.6935,
      "step": 61
    },
    {
      "epoch": 0.17464788732394365,
      "grad_norm": 0.8421158549635541,
      "learning_rate": 5.794392523364486e-06,
      "loss": 0.6476,
      "step": 62
    },
    {
      "epoch": 0.17746478873239438,
      "grad_norm": 0.8618342169352607,
      "learning_rate": 5.88785046728972e-06,
      "loss": 0.6251,
      "step": 63
    },
    {
      "epoch": 0.18028169014084508,
      "grad_norm": 0.9627093955885784,
      "learning_rate": 5.981308411214953e-06,
      "loss": 0.7648,
      "step": 64
    },
    {
      "epoch": 0.18309859154929578,
      "grad_norm": 0.8615872113332586,
      "learning_rate": 6.074766355140187e-06,
      "loss": 0.7052,
      "step": 65
    },
    {
      "epoch": 0.18591549295774648,
      "grad_norm": 0.7938477476235895,
      "learning_rate": 6.16822429906542e-06,
      "loss": 0.6856,
      "step": 66
    },
    {
      "epoch": 0.18873239436619718,
      "grad_norm": 0.9314578053407399,
      "learning_rate": 6.2616822429906544e-06,
      "loss": 0.675,
      "step": 67
    },
    {
      "epoch": 0.19154929577464788,
      "grad_norm": 0.8414192598073413,
      "learning_rate": 6.355140186915888e-06,
      "loss": 0.7094,
      "step": 68
    },
    {
      "epoch": 0.19436619718309858,
      "grad_norm": 0.9088665681264082,
      "learning_rate": 6.448598130841122e-06,
      "loss": 0.6834,
      "step": 69
    },
    {
      "epoch": 0.19718309859154928,
      "grad_norm": 0.902387432849795,
      "learning_rate": 6.542056074766355e-06,
      "loss": 0.7152,
      "step": 70
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.9642448781976154,
      "learning_rate": 6.635514018691589e-06,
      "loss": 0.685,
      "step": 71
    },
    {
      "epoch": 0.2028169014084507,
      "grad_norm": 0.8474244419117023,
      "learning_rate": 6.728971962616823e-06,
      "loss": 0.6712,
      "step": 72
    },
    {
      "epoch": 0.2056338028169014,
      "grad_norm": 0.9474565823957127,
      "learning_rate": 6.822429906542056e-06,
      "loss": 0.6786,
      "step": 73
    },
    {
      "epoch": 0.2084507042253521,
      "grad_norm": 0.9289574016521713,
      "learning_rate": 6.91588785046729e-06,
      "loss": 0.7304,
      "step": 74
    },
    {
      "epoch": 0.2112676056338028,
      "grad_norm": 0.9127008704865647,
      "learning_rate": 7.009345794392523e-06,
      "loss": 0.7452,
      "step": 75
    },
    {
      "epoch": 0.2140845070422535,
      "grad_norm": 0.9662686159807351,
      "learning_rate": 7.1028037383177574e-06,
      "loss": 0.7134,
      "step": 76
    },
    {
      "epoch": 0.21690140845070421,
      "grad_norm": 1.0772478476012384,
      "learning_rate": 7.196261682242991e-06,
      "loss": 0.6781,
      "step": 77
    },
    {
      "epoch": 0.21971830985915494,
      "grad_norm": 1.0550174155184562,
      "learning_rate": 7.289719626168225e-06,
      "loss": 0.6633,
      "step": 78
    },
    {
      "epoch": 0.22253521126760564,
      "grad_norm": 0.8802108160521099,
      "learning_rate": 7.383177570093458e-06,
      "loss": 0.6876,
      "step": 79
    },
    {
      "epoch": 0.22535211267605634,
      "grad_norm": 0.9429110893196399,
      "learning_rate": 7.476635514018692e-06,
      "loss": 0.7335,
      "step": 80
    },
    {
      "epoch": 0.22816901408450704,
      "grad_norm": 0.938383459599816,
      "learning_rate": 7.570093457943926e-06,
      "loss": 0.7397,
      "step": 81
    },
    {
      "epoch": 0.23098591549295774,
      "grad_norm": 0.9096159486194378,
      "learning_rate": 7.663551401869159e-06,
      "loss": 0.6888,
      "step": 82
    },
    {
      "epoch": 0.23380281690140844,
      "grad_norm": 0.8917639799429912,
      "learning_rate": 7.757009345794392e-06,
      "loss": 0.7029,
      "step": 83
    },
    {
      "epoch": 0.23661971830985915,
      "grad_norm": 0.8979990857654783,
      "learning_rate": 7.850467289719627e-06,
      "loss": 0.768,
      "step": 84
    },
    {
      "epoch": 0.23943661971830985,
      "grad_norm": 0.8993682822907498,
      "learning_rate": 7.94392523364486e-06,
      "loss": 0.7967,
      "step": 85
    },
    {
      "epoch": 0.24225352112676057,
      "grad_norm": 0.9519036462618192,
      "learning_rate": 8.037383177570094e-06,
      "loss": 0.7236,
      "step": 86
    },
    {
      "epoch": 0.24507042253521127,
      "grad_norm": 0.9320429116107417,
      "learning_rate": 8.130841121495327e-06,
      "loss": 0.7031,
      "step": 87
    },
    {
      "epoch": 0.24788732394366197,
      "grad_norm": 1.1968431066091298,
      "learning_rate": 8.224299065420562e-06,
      "loss": 0.8184,
      "step": 88
    },
    {
      "epoch": 0.2507042253521127,
      "grad_norm": 1.015717114563735,
      "learning_rate": 8.317757009345795e-06,
      "loss": 0.7366,
      "step": 89
    },
    {
      "epoch": 0.2535211267605634,
      "grad_norm": 0.8628312778604383,
      "learning_rate": 8.411214953271028e-06,
      "loss": 0.6516,
      "step": 90
    },
    {
      "epoch": 0.2563380281690141,
      "grad_norm": 0.9207687578294539,
      "learning_rate": 8.504672897196263e-06,
      "loss": 0.7463,
      "step": 91
    },
    {
      "epoch": 0.2591549295774648,
      "grad_norm": 0.9076929603375471,
      "learning_rate": 8.598130841121496e-06,
      "loss": 0.7232,
      "step": 92
    },
    {
      "epoch": 0.2619718309859155,
      "grad_norm": 0.8551379886884969,
      "learning_rate": 8.69158878504673e-06,
      "loss": 0.6387,
      "step": 93
    },
    {
      "epoch": 0.2647887323943662,
      "grad_norm": 0.874389963082252,
      "learning_rate": 8.785046728971963e-06,
      "loss": 0.5928,
      "step": 94
    },
    {
      "epoch": 0.2676056338028169,
      "grad_norm": 0.9772206166305732,
      "learning_rate": 8.878504672897197e-06,
      "loss": 0.7431,
      "step": 95
    },
    {
      "epoch": 0.2704225352112676,
      "grad_norm": 1.0472604316389014,
      "learning_rate": 8.97196261682243e-06,
      "loss": 0.6916,
      "step": 96
    },
    {
      "epoch": 0.27323943661971833,
      "grad_norm": 0.9161522458184038,
      "learning_rate": 9.065420560747664e-06,
      "loss": 0.7618,
      "step": 97
    },
    {
      "epoch": 0.27605633802816903,
      "grad_norm": 0.9694215423750022,
      "learning_rate": 9.158878504672899e-06,
      "loss": 0.7371,
      "step": 98
    },
    {
      "epoch": 0.27887323943661974,
      "grad_norm": 0.9914139238391411,
      "learning_rate": 9.252336448598132e-06,
      "loss": 0.7141,
      "step": 99
    },
    {
      "epoch": 0.28169014084507044,
      "grad_norm": 1.0202196257679186,
      "learning_rate": 9.345794392523365e-06,
      "loss": 0.7255,
      "step": 100
    },
    {
      "epoch": 0.28450704225352114,
      "grad_norm": 0.9601713649988124,
      "learning_rate": 9.439252336448598e-06,
      "loss": 0.7014,
      "step": 101
    },
    {
      "epoch": 0.28732394366197184,
      "grad_norm": 0.8615212518278053,
      "learning_rate": 9.532710280373833e-06,
      "loss": 0.6382,
      "step": 102
    },
    {
      "epoch": 0.29014084507042254,
      "grad_norm": 1.2055501454669908,
      "learning_rate": 9.626168224299066e-06,
      "loss": 0.7745,
      "step": 103
    },
    {
      "epoch": 0.29295774647887324,
      "grad_norm": 0.8554711418047789,
      "learning_rate": 9.7196261682243e-06,
      "loss": 0.678,
      "step": 104
    },
    {
      "epoch": 0.29577464788732394,
      "grad_norm": 0.9686952732108494,
      "learning_rate": 9.813084112149533e-06,
      "loss": 0.7142,
      "step": 105
    },
    {
      "epoch": 0.29859154929577464,
      "grad_norm": 0.8833954097474772,
      "learning_rate": 9.906542056074768e-06,
      "loss": 0.6931,
      "step": 106
    },
    {
      "epoch": 0.30140845070422534,
      "grad_norm": 0.8996092526129557,
      "learning_rate": 1e-05,
      "loss": 0.7937,
      "step": 107
    },
    {
      "epoch": 0.30422535211267604,
      "grad_norm": 0.9372563478579707,
      "learning_rate": 9.999973115104874e-06,
      "loss": 0.7803,
      "step": 108
    },
    {
      "epoch": 0.30704225352112674,
      "grad_norm": 0.8475734380103672,
      "learning_rate": 9.999892460708615e-06,
      "loss": 0.701,
      "step": 109
    },
    {
      "epoch": 0.30985915492957744,
      "grad_norm": 0.8534917997572844,
      "learning_rate": 9.999758037678576e-06,
      "loss": 0.7371,
      "step": 110
    },
    {
      "epoch": 0.3126760563380282,
      "grad_norm": 0.8434562246377327,
      "learning_rate": 9.999569847460335e-06,
      "loss": 0.7268,
      "step": 111
    },
    {
      "epoch": 0.3154929577464789,
      "grad_norm": 0.8281726646550165,
      "learning_rate": 9.999327892077683e-06,
      "loss": 0.6853,
      "step": 112
    },
    {
      "epoch": 0.3183098591549296,
      "grad_norm": 0.9271927724711224,
      "learning_rate": 9.9990321741326e-06,
      "loss": 0.6452,
      "step": 113
    },
    {
      "epoch": 0.3211267605633803,
      "grad_norm": 0.8848897647327078,
      "learning_rate": 9.998682696805224e-06,
      "loss": 0.7243,
      "step": 114
    },
    {
      "epoch": 0.323943661971831,
      "grad_norm": 0.8610699024398826,
      "learning_rate": 9.998279463853819e-06,
      "loss": 0.6448,
      "step": 115
    },
    {
      "epoch": 0.3267605633802817,
      "grad_norm": 0.8677582610096554,
      "learning_rate": 9.997822479614733e-06,
      "loss": 0.6943,
      "step": 116
    },
    {
      "epoch": 0.3295774647887324,
      "grad_norm": 0.8471875616608381,
      "learning_rate": 9.997311749002358e-06,
      "loss": 0.685,
      "step": 117
    },
    {
      "epoch": 0.3323943661971831,
      "grad_norm": 0.8682796024046532,
      "learning_rate": 9.996747277509068e-06,
      "loss": 0.745,
      "step": 118
    },
    {
      "epoch": 0.3352112676056338,
      "grad_norm": 0.8341754007010466,
      "learning_rate": 9.996129071205167e-06,
      "loss": 0.7004,
      "step": 119
    },
    {
      "epoch": 0.3380281690140845,
      "grad_norm": 0.8707994349697389,
      "learning_rate": 9.99545713673882e-06,
      "loss": 0.7398,
      "step": 120
    },
    {
      "epoch": 0.3408450704225352,
      "grad_norm": 0.8082724338122019,
      "learning_rate": 9.994731481335979e-06,
      "loss": 0.7061,
      "step": 121
    },
    {
      "epoch": 0.3436619718309859,
      "grad_norm": 0.9249095219534365,
      "learning_rate": 9.993952112800314e-06,
      "loss": 0.696,
      "step": 122
    },
    {
      "epoch": 0.3464788732394366,
      "grad_norm": 0.944187877477761,
      "learning_rate": 9.993119039513124e-06,
      "loss": 0.73,
      "step": 123
    },
    {
      "epoch": 0.3492957746478873,
      "grad_norm": 1.0057102257733501,
      "learning_rate": 9.992232270433239e-06,
      "loss": 0.7613,
      "step": 124
    },
    {
      "epoch": 0.352112676056338,
      "grad_norm": 0.8333808705246333,
      "learning_rate": 9.991291815096941e-06,
      "loss": 0.7452,
      "step": 125
    },
    {
      "epoch": 0.35492957746478876,
      "grad_norm": 0.8949365308913656,
      "learning_rate": 9.990297683617844e-06,
      "loss": 0.7289,
      "step": 126
    },
    {
      "epoch": 0.35774647887323946,
      "grad_norm": 0.8224952199583537,
      "learning_rate": 9.989249886686798e-06,
      "loss": 0.697,
      "step": 127
    },
    {
      "epoch": 0.36056338028169016,
      "grad_norm": 0.8463117609492709,
      "learning_rate": 9.988148435571766e-06,
      "loss": 0.6907,
      "step": 128
    },
    {
      "epoch": 0.36338028169014086,
      "grad_norm": 0.8097036868252439,
      "learning_rate": 9.98699334211771e-06,
      "loss": 0.6631,
      "step": 129
    },
    {
      "epoch": 0.36619718309859156,
      "grad_norm": 0.8671869864436087,
      "learning_rate": 9.985784618746455e-06,
      "loss": 0.6995,
      "step": 130
    },
    {
      "epoch": 0.36901408450704226,
      "grad_norm": 0.8736051125955552,
      "learning_rate": 9.984522278456558e-06,
      "loss": 0.7605,
      "step": 131
    },
    {
      "epoch": 0.37183098591549296,
      "grad_norm": 0.8479930342434334,
      "learning_rate": 9.983206334823179e-06,
      "loss": 0.7521,
      "step": 132
    },
    {
      "epoch": 0.37464788732394366,
      "grad_norm": 0.8797715865284296,
      "learning_rate": 9.981836801997916e-06,
      "loss": 0.6507,
      "step": 133
    },
    {
      "epoch": 0.37746478873239436,
      "grad_norm": 0.8625026171210389,
      "learning_rate": 9.98041369470867e-06,
      "loss": 0.7442,
      "step": 134
    },
    {
      "epoch": 0.38028169014084506,
      "grad_norm": 0.9674565189535757,
      "learning_rate": 9.978937028259477e-06,
      "loss": 0.6997,
      "step": 135
    },
    {
      "epoch": 0.38309859154929576,
      "grad_norm": 0.8204379275136592,
      "learning_rate": 9.977406818530346e-06,
      "loss": 0.7128,
      "step": 136
    },
    {
      "epoch": 0.38591549295774646,
      "grad_norm": 0.9145363476852016,
      "learning_rate": 9.975823081977089e-06,
      "loss": 0.7915,
      "step": 137
    },
    {
      "epoch": 0.38873239436619716,
      "grad_norm": 0.8473502868722297,
      "learning_rate": 9.974185835631141e-06,
      "loss": 0.6332,
      "step": 138
    },
    {
      "epoch": 0.39154929577464787,
      "grad_norm": 0.8811521109738086,
      "learning_rate": 9.972495097099379e-06,
      "loss": 0.6646,
      "step": 139
    },
    {
      "epoch": 0.39436619718309857,
      "grad_norm": 0.8420515822936742,
      "learning_rate": 9.970750884563938e-06,
      "loss": 0.6463,
      "step": 140
    },
    {
      "epoch": 0.3971830985915493,
      "grad_norm": 0.8796307650058819,
      "learning_rate": 9.968953216782005e-06,
      "loss": 0.7702,
      "step": 141
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.9113098059629853,
      "learning_rate": 9.96710211308562e-06,
      "loss": 0.6678,
      "step": 142
    },
    {
      "epoch": 0.4028169014084507,
      "grad_norm": 0.82206028117941,
      "learning_rate": 9.965197593381483e-06,
      "loss": 0.7489,
      "step": 143
    },
    {
      "epoch": 0.4056338028169014,
      "grad_norm": 0.880880001482584,
      "learning_rate": 9.963239678150712e-06,
      "loss": 0.6868,
      "step": 144
    },
    {
      "epoch": 0.4084507042253521,
      "grad_norm": 0.9060474241316776,
      "learning_rate": 9.961228388448648e-06,
      "loss": 0.7212,
      "step": 145
    },
    {
      "epoch": 0.4112676056338028,
      "grad_norm": 0.9310497997261157,
      "learning_rate": 9.959163745904613e-06,
      "loss": 0.7736,
      "step": 146
    },
    {
      "epoch": 0.4140845070422535,
      "grad_norm": 0.7931697883942757,
      "learning_rate": 9.95704577272169e-06,
      "loss": 0.6676,
      "step": 147
    },
    {
      "epoch": 0.4169014084507042,
      "grad_norm": 0.8932141674301038,
      "learning_rate": 9.954874491676474e-06,
      "loss": 0.7192,
      "step": 148
    },
    {
      "epoch": 0.4197183098591549,
      "grad_norm": 0.9415400016839601,
      "learning_rate": 9.952649926118827e-06,
      "loss": 0.6864,
      "step": 149
    },
    {
      "epoch": 0.4225352112676056,
      "grad_norm": 0.7916165651672555,
      "learning_rate": 9.950372099971635e-06,
      "loss": 0.6611,
      "step": 150
    },
    {
      "epoch": 0.4253521126760563,
      "grad_norm": 0.8634003427333417,
      "learning_rate": 9.948041037730546e-06,
      "loss": 0.7326,
      "step": 151
    },
    {
      "epoch": 0.428169014084507,
      "grad_norm": 0.9263824014581405,
      "learning_rate": 9.945656764463706e-06,
      "loss": 0.7068,
      "step": 152
    },
    {
      "epoch": 0.4309859154929577,
      "grad_norm": 0.7528011216400118,
      "learning_rate": 9.943219305811486e-06,
      "loss": 0.662,
      "step": 153
    },
    {
      "epoch": 0.43380281690140843,
      "grad_norm": 0.8372646453988154,
      "learning_rate": 9.940728687986219e-06,
      "loss": 0.7213,
      "step": 154
    },
    {
      "epoch": 0.43661971830985913,
      "grad_norm": 0.9463740009923008,
      "learning_rate": 9.938184937771903e-06,
      "loss": 0.6919,
      "step": 155
    },
    {
      "epoch": 0.4394366197183099,
      "grad_norm": 0.959386942798321,
      "learning_rate": 9.935588082523917e-06,
      "loss": 0.6449,
      "step": 156
    },
    {
      "epoch": 0.4422535211267606,
      "grad_norm": 0.9250783308719386,
      "learning_rate": 9.93293815016874e-06,
      "loss": 0.6796,
      "step": 157
    },
    {
      "epoch": 0.4450704225352113,
      "grad_norm": 0.7946799691080673,
      "learning_rate": 9.930235169203628e-06,
      "loss": 0.67,
      "step": 158
    },
    {
      "epoch": 0.447887323943662,
      "grad_norm": 0.8522628286586079,
      "learning_rate": 9.927479168696327e-06,
      "loss": 0.7309,
      "step": 159
    },
    {
      "epoch": 0.4507042253521127,
      "grad_norm": 0.837190553799222,
      "learning_rate": 9.924670178284751e-06,
      "loss": 0.6668,
      "step": 160
    },
    {
      "epoch": 0.4535211267605634,
      "grad_norm": 1.0436089990244963,
      "learning_rate": 9.921808228176667e-06,
      "loss": 0.7829,
      "step": 161
    },
    {
      "epoch": 0.4563380281690141,
      "grad_norm": 0.893929704291238,
      "learning_rate": 9.918893349149361e-06,
      "loss": 0.7153,
      "step": 162
    },
    {
      "epoch": 0.4591549295774648,
      "grad_norm": 0.8869097673696764,
      "learning_rate": 9.915925572549325e-06,
      "loss": 0.6485,
      "step": 163
    },
    {
      "epoch": 0.4619718309859155,
      "grad_norm": 0.8467721185605459,
      "learning_rate": 9.912904930291902e-06,
      "loss": 0.7482,
      "step": 164
    },
    {
      "epoch": 0.4647887323943662,
      "grad_norm": 0.897220133636629,
      "learning_rate": 9.909831454860953e-06,
      "loss": 0.6422,
      "step": 165
    },
    {
      "epoch": 0.4676056338028169,
      "grad_norm": 0.8919253252137377,
      "learning_rate": 9.906705179308503e-06,
      "loss": 0.7032,
      "step": 166
    },
    {
      "epoch": 0.4704225352112676,
      "grad_norm": 0.8857603615046576,
      "learning_rate": 9.903526137254386e-06,
      "loss": 0.6246,
      "step": 167
    },
    {
      "epoch": 0.4732394366197183,
      "grad_norm": 0.8283723940151976,
      "learning_rate": 9.900294362885892e-06,
      "loss": 0.643,
      "step": 168
    },
    {
      "epoch": 0.476056338028169,
      "grad_norm": 0.8463508629352864,
      "learning_rate": 9.897009890957382e-06,
      "loss": 0.7326,
      "step": 169
    },
    {
      "epoch": 0.4788732394366197,
      "grad_norm": 0.8166043234476047,
      "learning_rate": 9.893672756789933e-06,
      "loss": 0.6919,
      "step": 170
    },
    {
      "epoch": 0.48169014084507045,
      "grad_norm": 0.8798248125614349,
      "learning_rate": 9.890282996270944e-06,
      "loss": 0.7502,
      "step": 171
    },
    {
      "epoch": 0.48450704225352115,
      "grad_norm": 0.9137410379876206,
      "learning_rate": 9.886840645853757e-06,
      "loss": 0.717,
      "step": 172
    },
    {
      "epoch": 0.48732394366197185,
      "grad_norm": 0.8682406299826412,
      "learning_rate": 9.883345742557265e-06,
      "loss": 0.7093,
      "step": 173
    },
    {
      "epoch": 0.49014084507042255,
      "grad_norm": 0.768099268451737,
      "learning_rate": 9.879798323965512e-06,
      "loss": 0.6324,
      "step": 174
    },
    {
      "epoch": 0.49295774647887325,
      "grad_norm": 0.7951997110898612,
      "learning_rate": 9.876198428227288e-06,
      "loss": 0.6043,
      "step": 175
    },
    {
      "epoch": 0.49577464788732395,
      "grad_norm": 0.8150266343962548,
      "learning_rate": 9.872546094055719e-06,
      "loss": 0.7314,
      "step": 176
    },
    {
      "epoch": 0.49859154929577465,
      "grad_norm": 0.7567196532773179,
      "learning_rate": 9.868841360727856e-06,
      "loss": 0.6341,
      "step": 177
    },
    {
      "epoch": 0.5014084507042254,
      "grad_norm": 0.8057546679078017,
      "learning_rate": 9.865084268084247e-06,
      "loss": 0.5985,
      "step": 178
    },
    {
      "epoch": 0.504225352112676,
      "grad_norm": 0.8248072860159059,
      "learning_rate": 9.861274856528504e-06,
      "loss": 0.6856,
      "step": 179
    },
    {
      "epoch": 0.5070422535211268,
      "grad_norm": 0.7422143006416313,
      "learning_rate": 9.857413167026881e-06,
      "loss": 0.6509,
      "step": 180
    },
    {
      "epoch": 0.5098591549295775,
      "grad_norm": 0.842171137809266,
      "learning_rate": 9.853499241107827e-06,
      "loss": 0.6759,
      "step": 181
    },
    {
      "epoch": 0.5126760563380282,
      "grad_norm": 1.0101293356673013,
      "learning_rate": 9.849533120861537e-06,
      "loss": 0.838,
      "step": 182
    },
    {
      "epoch": 0.5154929577464789,
      "grad_norm": 0.7530541447107382,
      "learning_rate": 9.845514848939499e-06,
      "loss": 0.7384,
      "step": 183
    },
    {
      "epoch": 0.5183098591549296,
      "grad_norm": 0.914908795328647,
      "learning_rate": 9.841444468554043e-06,
      "loss": 0.7436,
      "step": 184
    },
    {
      "epoch": 0.5211267605633803,
      "grad_norm": 0.8862498828793486,
      "learning_rate": 9.837322023477866e-06,
      "loss": 0.7144,
      "step": 185
    },
    {
      "epoch": 0.523943661971831,
      "grad_norm": 0.8056528376306878,
      "learning_rate": 9.833147558043574e-06,
      "loss": 0.6498,
      "step": 186
    },
    {
      "epoch": 0.5267605633802817,
      "grad_norm": 0.8011120670486118,
      "learning_rate": 9.82892111714319e-06,
      "loss": 0.7161,
      "step": 187
    },
    {
      "epoch": 0.5295774647887324,
      "grad_norm": 0.7572342622375217,
      "learning_rate": 9.824642746227682e-06,
      "loss": 0.7064,
      "step": 188
    },
    {
      "epoch": 0.532394366197183,
      "grad_norm": 0.8039481620735608,
      "learning_rate": 9.820312491306471e-06,
      "loss": 0.6512,
      "step": 189
    },
    {
      "epoch": 0.5352112676056338,
      "grad_norm": 0.7738723306767284,
      "learning_rate": 9.815930398946942e-06,
      "loss": 0.6906,
      "step": 190
    },
    {
      "epoch": 0.5380281690140845,
      "grad_norm": 0.7165440107878704,
      "learning_rate": 9.811496516273925e-06,
      "loss": 0.6153,
      "step": 191
    },
    {
      "epoch": 0.5408450704225352,
      "grad_norm": 0.8319057805907693,
      "learning_rate": 9.807010890969214e-06,
      "loss": 0.6864,
      "step": 192
    },
    {
      "epoch": 0.543661971830986,
      "grad_norm": 0.773189524368119,
      "learning_rate": 9.80247357127103e-06,
      "loss": 0.7204,
      "step": 193
    },
    {
      "epoch": 0.5464788732394367,
      "grad_norm": 0.7653522714932102,
      "learning_rate": 9.797884605973523e-06,
      "loss": 0.6266,
      "step": 194
    },
    {
      "epoch": 0.5492957746478874,
      "grad_norm": 0.76169482878666,
      "learning_rate": 9.793244044426233e-06,
      "loss": 0.6837,
      "step": 195
    },
    {
      "epoch": 0.5521126760563381,
      "grad_norm": 0.853170087758016,
      "learning_rate": 9.788551936533561e-06,
      "loss": 0.7598,
      "step": 196
    },
    {
      "epoch": 0.5549295774647888,
      "grad_norm": 0.8075207803578713,
      "learning_rate": 9.783808332754242e-06,
      "loss": 0.733,
      "step": 197
    },
    {
      "epoch": 0.5577464788732395,
      "grad_norm": 0.8517323289471529,
      "learning_rate": 9.779013284100791e-06,
      "loss": 0.7236,
      "step": 198
    },
    {
      "epoch": 0.5605633802816902,
      "grad_norm": 0.7997362496535447,
      "learning_rate": 9.77416684213896e-06,
      "loss": 0.712,
      "step": 199
    },
    {
      "epoch": 0.5633802816901409,
      "grad_norm": 0.8430063817076954,
      "learning_rate": 9.769269058987181e-06,
      "loss": 0.6284,
      "step": 200
    },
    {
      "epoch": 0.5661971830985916,
      "grad_norm": 0.8121616168773351,
      "learning_rate": 9.76431998731601e-06,
      "loss": 0.6711,
      "step": 201
    },
    {
      "epoch": 0.5690140845070423,
      "grad_norm": 0.8827282760216014,
      "learning_rate": 9.759319680347556e-06,
      "loss": 0.7354,
      "step": 202
    },
    {
      "epoch": 0.571830985915493,
      "grad_norm": 0.853130964046107,
      "learning_rate": 9.75426819185491e-06,
      "loss": 0.7771,
      "step": 203
    },
    {
      "epoch": 0.5746478873239437,
      "grad_norm": 0.9947412584733369,
      "learning_rate": 9.74916557616157e-06,
      "loss": 0.7,
      "step": 204
    },
    {
      "epoch": 0.5774647887323944,
      "grad_norm": 0.8769746348104829,
      "learning_rate": 9.744011888140847e-06,
      "loss": 0.6514,
      "step": 205
    },
    {
      "epoch": 0.5802816901408451,
      "grad_norm": 0.9184017741530913,
      "learning_rate": 9.738807183215288e-06,
      "loss": 0.7239,
      "step": 206
    },
    {
      "epoch": 0.5830985915492958,
      "grad_norm": 0.8842913206665063,
      "learning_rate": 9.73355151735607e-06,
      "loss": 0.684,
      "step": 207
    },
    {
      "epoch": 0.5859154929577465,
      "grad_norm": 0.794506224518057,
      "learning_rate": 9.728244947082405e-06,
      "loss": 0.6926,
      "step": 208
    },
    {
      "epoch": 0.5887323943661972,
      "grad_norm": 0.8074121706042865,
      "learning_rate": 9.722887529460928e-06,
      "loss": 0.7134,
      "step": 209
    },
    {
      "epoch": 0.5915492957746479,
      "grad_norm": 0.8230238825095924,
      "learning_rate": 9.717479322105079e-06,
      "loss": 0.7206,
      "step": 210
    },
    {
      "epoch": 0.5943661971830986,
      "grad_norm": 0.9594950928708149,
      "learning_rate": 9.712020383174496e-06,
      "loss": 0.6946,
      "step": 211
    },
    {
      "epoch": 0.5971830985915493,
      "grad_norm": 0.7521256870035444,
      "learning_rate": 9.70651077137438e-06,
      "loss": 0.5711,
      "step": 212
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.7671581105894907,
      "learning_rate": 9.700950545954867e-06,
      "loss": 0.6589,
      "step": 213
    },
    {
      "epoch": 0.6028169014084507,
      "grad_norm": 0.8178947584831048,
      "learning_rate": 9.695339766710382e-06,
      "loss": 0.723,
      "step": 214
    },
    {
      "epoch": 0.6056338028169014,
      "grad_norm": 0.8003431837226573,
      "learning_rate": 9.689678493979011e-06,
      "loss": 0.6493,
      "step": 215
    },
    {
      "epoch": 0.6084507042253521,
      "grad_norm": 0.7952819058062531,
      "learning_rate": 9.683966788641848e-06,
      "loss": 0.6962,
      "step": 216
    },
    {
      "epoch": 0.6112676056338028,
      "grad_norm": 0.7826856935612455,
      "learning_rate": 9.678204712122328e-06,
      "loss": 0.7371,
      "step": 217
    },
    {
      "epoch": 0.6140845070422535,
      "grad_norm": 0.7746418412525581,
      "learning_rate": 9.672392326385582e-06,
      "loss": 0.6902,
      "step": 218
    },
    {
      "epoch": 0.6169014084507042,
      "grad_norm": 0.940402540251371,
      "learning_rate": 9.666529693937763e-06,
      "loss": 0.779,
      "step": 219
    },
    {
      "epoch": 0.6197183098591549,
      "grad_norm": 0.8449206053067433,
      "learning_rate": 9.660616877825374e-06,
      "loss": 0.6905,
      "step": 220
    },
    {
      "epoch": 0.6225352112676056,
      "grad_norm": 0.7994133282679672,
      "learning_rate": 9.65465394163459e-06,
      "loss": 0.7171,
      "step": 221
    },
    {
      "epoch": 0.6253521126760564,
      "grad_norm": 0.8032156386437383,
      "learning_rate": 9.64864094949058e-06,
      "loss": 0.6782,
      "step": 222
    },
    {
      "epoch": 0.6281690140845071,
      "grad_norm": 0.9003451206809043,
      "learning_rate": 9.642577966056806e-06,
      "loss": 0.7278,
      "step": 223
    },
    {
      "epoch": 0.6309859154929578,
      "grad_norm": 0.7775350319628389,
      "learning_rate": 9.636465056534337e-06,
      "loss": 0.6913,
      "step": 224
    },
    {
      "epoch": 0.6338028169014085,
      "grad_norm": 0.8310388732940904,
      "learning_rate": 9.630302286661149e-06,
      "loss": 0.7544,
      "step": 225
    },
    {
      "epoch": 0.6366197183098592,
      "grad_norm": 0.8853987442843421,
      "learning_rate": 9.624089722711409e-06,
      "loss": 0.6923,
      "step": 226
    },
    {
      "epoch": 0.6394366197183099,
      "grad_norm": 0.8804755951508887,
      "learning_rate": 9.617827431494769e-06,
      "loss": 0.6605,
      "step": 227
    },
    {
      "epoch": 0.6422535211267606,
      "grad_norm": 0.8366059157406054,
      "learning_rate": 9.611515480355644e-06,
      "loss": 0.7357,
      "step": 228
    },
    {
      "epoch": 0.6450704225352113,
      "grad_norm": 0.7210382994869629,
      "learning_rate": 9.605153937172495e-06,
      "loss": 0.6835,
      "step": 229
    },
    {
      "epoch": 0.647887323943662,
      "grad_norm": 0.8739466441160235,
      "learning_rate": 9.598742870357089e-06,
      "loss": 0.6739,
      "step": 230
    },
    {
      "epoch": 0.6507042253521127,
      "grad_norm": 0.8654712248667421,
      "learning_rate": 9.592282348853772e-06,
      "loss": 0.7532,
      "step": 231
    },
    {
      "epoch": 0.6535211267605634,
      "grad_norm": 0.9065008902303662,
      "learning_rate": 9.585772442138717e-06,
      "loss": 0.734,
      "step": 232
    },
    {
      "epoch": 0.6563380281690141,
      "grad_norm": 0.7871365715275687,
      "learning_rate": 9.57921322021919e-06,
      "loss": 0.6635,
      "step": 233
    },
    {
      "epoch": 0.6591549295774648,
      "grad_norm": 0.7642041831022736,
      "learning_rate": 9.572604753632789e-06,
      "loss": 0.6484,
      "step": 234
    },
    {
      "epoch": 0.6619718309859155,
      "grad_norm": 0.8271285548719295,
      "learning_rate": 9.565947113446685e-06,
      "loss": 0.6601,
      "step": 235
    },
    {
      "epoch": 0.6647887323943662,
      "grad_norm": 0.7538375562561086,
      "learning_rate": 9.559240371256861e-06,
      "loss": 0.6902,
      "step": 236
    },
    {
      "epoch": 0.6676056338028169,
      "grad_norm": 0.8043626834134439,
      "learning_rate": 9.552484599187344e-06,
      "loss": 0.6368,
      "step": 237
    },
    {
      "epoch": 0.6704225352112676,
      "grad_norm": 0.7529150606741751,
      "learning_rate": 9.545679869889422e-06,
      "loss": 0.7056,
      "step": 238
    },
    {
      "epoch": 0.6732394366197183,
      "grad_norm": 0.7284992451418077,
      "learning_rate": 9.538826256540866e-06,
      "loss": 0.701,
      "step": 239
    },
    {
      "epoch": 0.676056338028169,
      "grad_norm": 0.7430111850931722,
      "learning_rate": 9.53192383284515e-06,
      "loss": 0.6279,
      "step": 240
    },
    {
      "epoch": 0.6788732394366197,
      "grad_norm": 0.8112227328214626,
      "learning_rate": 9.524972673030646e-06,
      "loss": 0.6795,
      "step": 241
    },
    {
      "epoch": 0.6816901408450704,
      "grad_norm": 0.7709521327734363,
      "learning_rate": 9.517972851849837e-06,
      "loss": 0.6938,
      "step": 242
    },
    {
      "epoch": 0.6845070422535211,
      "grad_norm": 0.839896263111437,
      "learning_rate": 9.510924444578505e-06,
      "loss": 0.7249,
      "step": 243
    },
    {
      "epoch": 0.6873239436619718,
      "grad_norm": 0.7232560690742893,
      "learning_rate": 9.503827527014927e-06,
      "loss": 0.6598,
      "step": 244
    },
    {
      "epoch": 0.6901408450704225,
      "grad_norm": 0.8123508737767481,
      "learning_rate": 9.496682175479058e-06,
      "loss": 0.7528,
      "step": 245
    },
    {
      "epoch": 0.6929577464788732,
      "grad_norm": 0.8462883686229686,
      "learning_rate": 9.489488466811706e-06,
      "loss": 0.6947,
      "step": 246
    },
    {
      "epoch": 0.6957746478873239,
      "grad_norm": 0.9431639768896318,
      "learning_rate": 9.482246478373713e-06,
      "loss": 0.8318,
      "step": 247
    },
    {
      "epoch": 0.6985915492957746,
      "grad_norm": 0.7818012727406521,
      "learning_rate": 9.47495628804512e-06,
      "loss": 0.7279,
      "step": 248
    },
    {
      "epoch": 0.7014084507042253,
      "grad_norm": 0.8562275464337439,
      "learning_rate": 9.467617974224326e-06,
      "loss": 0.7178,
      "step": 249
    },
    {
      "epoch": 0.704225352112676,
      "grad_norm": 0.7745218443030316,
      "learning_rate": 9.460231615827254e-06,
      "loss": 0.6763,
      "step": 250
    },
    {
      "epoch": 0.7070422535211267,
      "grad_norm": 0.8938034789544281,
      "learning_rate": 9.452797292286486e-06,
      "loss": 0.7282,
      "step": 251
    },
    {
      "epoch": 0.7098591549295775,
      "grad_norm": 0.8421964462964258,
      "learning_rate": 9.445315083550432e-06,
      "loss": 0.6092,
      "step": 252
    },
    {
      "epoch": 0.7126760563380282,
      "grad_norm": 0.8842902719459901,
      "learning_rate": 9.437785070082448e-06,
      "loss": 0.7439,
      "step": 253
    },
    {
      "epoch": 0.7154929577464789,
      "grad_norm": 0.8295883668473504,
      "learning_rate": 9.430207332859981e-06,
      "loss": 0.6941,
      "step": 254
    },
    {
      "epoch": 0.7183098591549296,
      "grad_norm": 0.7997799100355183,
      "learning_rate": 9.422581953373704e-06,
      "loss": 0.6726,
      "step": 255
    },
    {
      "epoch": 0.7211267605633803,
      "grad_norm": 0.8249888871807052,
      "learning_rate": 9.414909013626624e-06,
      "loss": 0.7556,
      "step": 256
    },
    {
      "epoch": 0.723943661971831,
      "grad_norm": 0.7139160556941465,
      "learning_rate": 9.407188596133212e-06,
      "loss": 0.7071,
      "step": 257
    },
    {
      "epoch": 0.7267605633802817,
      "grad_norm": 0.8024124317877482,
      "learning_rate": 9.399420783918518e-06,
      "loss": 0.6568,
      "step": 258
    },
    {
      "epoch": 0.7295774647887324,
      "grad_norm": 0.8584267549374202,
      "learning_rate": 9.391605660517268e-06,
      "loss": 0.6367,
      "step": 259
    },
    {
      "epoch": 0.7323943661971831,
      "grad_norm": 0.7902851358778393,
      "learning_rate": 9.38374330997297e-06,
      "loss": 0.6593,
      "step": 260
    },
    {
      "epoch": 0.7352112676056338,
      "grad_norm": 0.8023474180886204,
      "learning_rate": 9.375833816837012e-06,
      "loss": 0.6464,
      "step": 261
    },
    {
      "epoch": 0.7380281690140845,
      "grad_norm": 0.8154573367134973,
      "learning_rate": 9.367877266167752e-06,
      "loss": 0.8018,
      "step": 262
    },
    {
      "epoch": 0.7408450704225352,
      "grad_norm": 0.8115241369192978,
      "learning_rate": 9.3598737435296e-06,
      "loss": 0.7639,
      "step": 263
    },
    {
      "epoch": 0.7436619718309859,
      "grad_norm": 0.7849626692594518,
      "learning_rate": 9.351823334992105e-06,
      "loss": 0.6817,
      "step": 264
    },
    {
      "epoch": 0.7464788732394366,
      "grad_norm": 0.9197729948526122,
      "learning_rate": 9.343726127129023e-06,
      "loss": 0.7335,
      "step": 265
    },
    {
      "epoch": 0.7492957746478873,
      "grad_norm": 0.7275321442153452,
      "learning_rate": 9.335582207017385e-06,
      "loss": 0.7261,
      "step": 266
    },
    {
      "epoch": 0.752112676056338,
      "grad_norm": 0.8163208868320786,
      "learning_rate": 9.327391662236569e-06,
      "loss": 0.741,
      "step": 267
    },
    {
      "epoch": 0.7549295774647887,
      "grad_norm": 0.7840261409488222,
      "learning_rate": 9.31915458086735e-06,
      "loss": 0.6902,
      "step": 268
    },
    {
      "epoch": 0.7577464788732394,
      "grad_norm": 0.8479200482066225,
      "learning_rate": 9.310871051490953e-06,
      "loss": 0.6263,
      "step": 269
    },
    {
      "epoch": 0.7605633802816901,
      "grad_norm": 0.7707125463434206,
      "learning_rate": 9.302541163188107e-06,
      "loss": 0.7071,
      "step": 270
    },
    {
      "epoch": 0.7633802816901408,
      "grad_norm": 0.8206470495747352,
      "learning_rate": 9.294165005538083e-06,
      "loss": 0.7162,
      "step": 271
    },
    {
      "epoch": 0.7661971830985915,
      "grad_norm": 0.7938247034692177,
      "learning_rate": 9.285742668617725e-06,
      "loss": 0.7303,
      "step": 272
    },
    {
      "epoch": 0.7690140845070422,
      "grad_norm": 0.7724192077582562,
      "learning_rate": 9.277274243000495e-06,
      "loss": 0.6745,
      "step": 273
    },
    {
      "epoch": 0.7718309859154929,
      "grad_norm": 0.7556279269726222,
      "learning_rate": 9.268759819755484e-06,
      "loss": 0.6684,
      "step": 274
    },
    {
      "epoch": 0.7746478873239436,
      "grad_norm": 0.9010158273015102,
      "learning_rate": 9.260199490446444e-06,
      "loss": 0.6968,
      "step": 275
    },
    {
      "epoch": 0.7774647887323943,
      "grad_norm": 0.9190880584517591,
      "learning_rate": 9.251593347130796e-06,
      "loss": 0.6965,
      "step": 276
    },
    {
      "epoch": 0.780281690140845,
      "grad_norm": 0.7392835129813884,
      "learning_rate": 9.242941482358646e-06,
      "loss": 0.6955,
      "step": 277
    },
    {
      "epoch": 0.7830985915492957,
      "grad_norm": 0.7721524689242966,
      "learning_rate": 9.234243989171784e-06,
      "loss": 0.7349,
      "step": 278
    },
    {
      "epoch": 0.7859154929577464,
      "grad_norm": 0.700499216910955,
      "learning_rate": 9.225500961102685e-06,
      "loss": 0.6621,
      "step": 279
    },
    {
      "epoch": 0.7887323943661971,
      "grad_norm": 0.7596012409463865,
      "learning_rate": 9.216712492173509e-06,
      "loss": 0.73,
      "step": 280
    },
    {
      "epoch": 0.7915492957746478,
      "grad_norm": 0.7112477735132746,
      "learning_rate": 9.20787867689508e-06,
      "loss": 0.6936,
      "step": 281
    },
    {
      "epoch": 0.7943661971830986,
      "grad_norm": 0.7538333397867995,
      "learning_rate": 9.19899961026588e-06,
      "loss": 0.7108,
      "step": 282
    },
    {
      "epoch": 0.7971830985915493,
      "grad_norm": 0.7500840684245914,
      "learning_rate": 9.190075387771014e-06,
      "loss": 0.6424,
      "step": 283
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.7767104224997774,
      "learning_rate": 9.181106105381201e-06,
      "loss": 0.7673,
      "step": 284
    },
    {
      "epoch": 0.8028169014084507,
      "grad_norm": 0.7051945870953593,
      "learning_rate": 9.172091859551727e-06,
      "loss": 0.6375,
      "step": 285
    },
    {
      "epoch": 0.8056338028169014,
      "grad_norm": 0.8187705644445149,
      "learning_rate": 9.16303274722141e-06,
      "loss": 0.7782,
      "step": 286
    },
    {
      "epoch": 0.8084507042253521,
      "grad_norm": 0.7805258045768315,
      "learning_rate": 9.153928865811567e-06,
      "loss": 0.7418,
      "step": 287
    },
    {
      "epoch": 0.8112676056338028,
      "grad_norm": 0.8128901606684851,
      "learning_rate": 9.144780313224955e-06,
      "loss": 0.7399,
      "step": 288
    },
    {
      "epoch": 0.8140845070422535,
      "grad_norm": 0.8154420318456077,
      "learning_rate": 9.135587187844727e-06,
      "loss": 0.723,
      "step": 289
    },
    {
      "epoch": 0.8169014084507042,
      "grad_norm": 0.7858069446808291,
      "learning_rate": 9.126349588533367e-06,
      "loss": 0.68,
      "step": 290
    },
    {
      "epoch": 0.819718309859155,
      "grad_norm": 0.8223570062612096,
      "learning_rate": 9.117067614631629e-06,
      "loss": 0.7178,
      "step": 291
    },
    {
      "epoch": 0.8225352112676056,
      "grad_norm": 0.7361952237465246,
      "learning_rate": 9.107741365957473e-06,
      "loss": 0.7253,
      "step": 292
    },
    {
      "epoch": 0.8253521126760563,
      "grad_norm": 0.7410997914967067,
      "learning_rate": 9.098370942804984e-06,
      "loss": 0.7365,
      "step": 293
    },
    {
      "epoch": 0.828169014084507,
      "grad_norm": 0.7841577337029458,
      "learning_rate": 9.0889564459433e-06,
      "loss": 0.6961,
      "step": 294
    },
    {
      "epoch": 0.8309859154929577,
      "grad_norm": 0.7103875687394275,
      "learning_rate": 9.079497976615527e-06,
      "loss": 0.7009,
      "step": 295
    },
    {
      "epoch": 0.8338028169014085,
      "grad_norm": 0.8613504465876265,
      "learning_rate": 9.069995636537646e-06,
      "loss": 0.8999,
      "step": 296
    },
    {
      "epoch": 0.8366197183098592,
      "grad_norm": 0.7512880794878137,
      "learning_rate": 9.060449527897424e-06,
      "loss": 0.7458,
      "step": 297
    },
    {
      "epoch": 0.8394366197183099,
      "grad_norm": 0.7108104992364117,
      "learning_rate": 9.050859753353312e-06,
      "loss": 0.6101,
      "step": 298
    },
    {
      "epoch": 0.8422535211267606,
      "grad_norm": 0.7613675413497479,
      "learning_rate": 9.041226416033344e-06,
      "loss": 0.6791,
      "step": 299
    },
    {
      "epoch": 0.8450704225352113,
      "grad_norm": 0.7227340858608536,
      "learning_rate": 9.031549619534025e-06,
      "loss": 0.62,
      "step": 300
    },
    {
      "epoch": 0.847887323943662,
      "grad_norm": 0.739015826440164,
      "learning_rate": 9.021829467919218e-06,
      "loss": 0.6194,
      "step": 301
    },
    {
      "epoch": 0.8507042253521127,
      "grad_norm": 0.7621930325437081,
      "learning_rate": 9.012066065719026e-06,
      "loss": 0.7342,
      "step": 302
    },
    {
      "epoch": 0.8535211267605634,
      "grad_norm": 0.7518088571559766,
      "learning_rate": 9.00225951792867e-06,
      "loss": 0.734,
      "step": 303
    },
    {
      "epoch": 0.856338028169014,
      "grad_norm": 0.7619799485709029,
      "learning_rate": 8.99240993000735e-06,
      "loss": 0.6668,
      "step": 304
    },
    {
      "epoch": 0.8591549295774648,
      "grad_norm": 0.7512429320136312,
      "learning_rate": 8.98251740787712e-06,
      "loss": 0.6285,
      "step": 305
    },
    {
      "epoch": 0.8619718309859155,
      "grad_norm": 0.7687230386597979,
      "learning_rate": 8.972582057921753e-06,
      "loss": 0.6658,
      "step": 306
    },
    {
      "epoch": 0.8647887323943662,
      "grad_norm": 0.7939832464335207,
      "learning_rate": 8.962603986985582e-06,
      "loss": 0.7445,
      "step": 307
    },
    {
      "epoch": 0.8676056338028169,
      "grad_norm": 0.7384840890732082,
      "learning_rate": 8.952583302372361e-06,
      "loss": 0.6054,
      "step": 308
    },
    {
      "epoch": 0.8704225352112676,
      "grad_norm": 0.7383794192616162,
      "learning_rate": 8.942520111844117e-06,
      "loss": 0.6556,
      "step": 309
    },
    {
      "epoch": 0.8732394366197183,
      "grad_norm": 0.7927695830739503,
      "learning_rate": 8.932414523619979e-06,
      "loss": 0.7918,
      "step": 310
    },
    {
      "epoch": 0.8760563380281691,
      "grad_norm": 0.8017064580913946,
      "learning_rate": 8.922266646375012e-06,
      "loss": 0.7132,
      "step": 311
    },
    {
      "epoch": 0.8788732394366198,
      "grad_norm": 0.7792989124184551,
      "learning_rate": 8.912076589239071e-06,
      "loss": 0.7115,
      "step": 312
    },
    {
      "epoch": 0.8816901408450705,
      "grad_norm": 0.7369492414275604,
      "learning_rate": 8.901844461795597e-06,
      "loss": 0.6636,
      "step": 313
    },
    {
      "epoch": 0.8845070422535212,
      "grad_norm": 0.7168268633853285,
      "learning_rate": 8.89157037408046e-06,
      "loss": 0.6391,
      "step": 314
    },
    {
      "epoch": 0.8873239436619719,
      "grad_norm": 0.7684412827406625,
      "learning_rate": 8.881254436580771e-06,
      "loss": 0.721,
      "step": 315
    },
    {
      "epoch": 0.8901408450704226,
      "grad_norm": 0.7399940724937712,
      "learning_rate": 8.870896760233687e-06,
      "loss": 0.7014,
      "step": 316
    },
    {
      "epoch": 0.8929577464788733,
      "grad_norm": 0.7427553650129637,
      "learning_rate": 8.860497456425226e-06,
      "loss": 0.7486,
      "step": 317
    },
    {
      "epoch": 0.895774647887324,
      "grad_norm": 0.7355572590337344,
      "learning_rate": 8.850056636989064e-06,
      "loss": 0.6474,
      "step": 318
    },
    {
      "epoch": 0.8985915492957747,
      "grad_norm": 0.7670739435633628,
      "learning_rate": 8.839574414205335e-06,
      "loss": 0.7253,
      "step": 319
    },
    {
      "epoch": 0.9014084507042254,
      "grad_norm": 0.7223450642414903,
      "learning_rate": 8.829050900799426e-06,
      "loss": 0.6343,
      "step": 320
    },
    {
      "epoch": 0.9042253521126761,
      "grad_norm": 0.8339291046044646,
      "learning_rate": 8.818486209940754e-06,
      "loss": 0.7616,
      "step": 321
    },
    {
      "epoch": 0.9070422535211268,
      "grad_norm": 0.769620973035585,
      "learning_rate": 8.807880455241562e-06,
      "loss": 0.7673,
      "step": 322
    },
    {
      "epoch": 0.9098591549295775,
      "grad_norm": 0.851306065801614,
      "learning_rate": 8.797233750755696e-06,
      "loss": 0.7491,
      "step": 323
    },
    {
      "epoch": 0.9126760563380282,
      "grad_norm": 0.7651622574675689,
      "learning_rate": 8.786546210977364e-06,
      "loss": 0.7015,
      "step": 324
    },
    {
      "epoch": 0.9154929577464789,
      "grad_norm": 0.71441222991302,
      "learning_rate": 8.775817950839925e-06,
      "loss": 0.6956,
      "step": 325
    },
    {
      "epoch": 0.9183098591549296,
      "grad_norm": 0.7506938193868895,
      "learning_rate": 8.765049085714634e-06,
      "loss": 0.6509,
      "step": 326
    },
    {
      "epoch": 0.9211267605633803,
      "grad_norm": 0.8071556078391223,
      "learning_rate": 8.754239731409418e-06,
      "loss": 0.7149,
      "step": 327
    },
    {
      "epoch": 0.923943661971831,
      "grad_norm": 0.7818360292624567,
      "learning_rate": 8.743390004167618e-06,
      "loss": 0.7043,
      "step": 328
    },
    {
      "epoch": 0.9267605633802817,
      "grad_norm": 0.81081711408685,
      "learning_rate": 8.732500020666748e-06,
      "loss": 0.6703,
      "step": 329
    },
    {
      "epoch": 0.9295774647887324,
      "grad_norm": 0.7372182871990494,
      "learning_rate": 8.72156989801723e-06,
      "loss": 0.7131,
      "step": 330
    },
    {
      "epoch": 0.9323943661971831,
      "grad_norm": 0.7249976512649938,
      "learning_rate": 8.710599753761148e-06,
      "loss": 0.5962,
      "step": 331
    },
    {
      "epoch": 0.9352112676056338,
      "grad_norm": 0.7427619837327385,
      "learning_rate": 8.699589705870972e-06,
      "loss": 0.6572,
      "step": 332
    },
    {
      "epoch": 0.9380281690140845,
      "grad_norm": 0.7244013374655381,
      "learning_rate": 8.688539872748293e-06,
      "loss": 0.7367,
      "step": 333
    },
    {
      "epoch": 0.9408450704225352,
      "grad_norm": 0.7966894332891572,
      "learning_rate": 8.677450373222555e-06,
      "loss": 0.72,
      "step": 334
    },
    {
      "epoch": 0.9436619718309859,
      "grad_norm": 0.6590452238575061,
      "learning_rate": 8.666321326549771e-06,
      "loss": 0.6308,
      "step": 335
    },
    {
      "epoch": 0.9464788732394366,
      "grad_norm": 0.7153952945703177,
      "learning_rate": 8.655152852411242e-06,
      "loss": 0.6736,
      "step": 336
    },
    {
      "epoch": 0.9492957746478873,
      "grad_norm": 0.6900657816220632,
      "learning_rate": 8.643945070912269e-06,
      "loss": 0.6529,
      "step": 337
    },
    {
      "epoch": 0.952112676056338,
      "grad_norm": 0.7842151250405361,
      "learning_rate": 8.632698102580866e-06,
      "loss": 0.6962,
      "step": 338
    },
    {
      "epoch": 0.9549295774647887,
      "grad_norm": 0.6970865060911673,
      "learning_rate": 8.621412068366455e-06,
      "loss": 0.6492,
      "step": 339
    },
    {
      "epoch": 0.9577464788732394,
      "grad_norm": 0.828143820100817,
      "learning_rate": 8.61008708963858e-06,
      "loss": 0.6946,
      "step": 340
    },
    {
      "epoch": 0.9605633802816902,
      "grad_norm": 0.7922646487759859,
      "learning_rate": 8.598723288185582e-06,
      "loss": 0.7274,
      "step": 341
    },
    {
      "epoch": 0.9633802816901409,
      "grad_norm": 0.7009049498810189,
      "learning_rate": 8.587320786213308e-06,
      "loss": 0.653,
      "step": 342
    },
    {
      "epoch": 0.9661971830985916,
      "grad_norm": 0.7345915900001514,
      "learning_rate": 8.575879706343786e-06,
      "loss": 0.69,
      "step": 343
    },
    {
      "epoch": 0.9690140845070423,
      "grad_norm": 0.7390349577087024,
      "learning_rate": 8.564400171613907e-06,
      "loss": 0.6721,
      "step": 344
    },
    {
      "epoch": 0.971830985915493,
      "grad_norm": 0.7240412147255116,
      "learning_rate": 8.552882305474106e-06,
      "loss": 0.7208,
      "step": 345
    },
    {
      "epoch": 0.9746478873239437,
      "grad_norm": 0.7431098381900512,
      "learning_rate": 8.541326231787036e-06,
      "loss": 0.7387,
      "step": 346
    },
    {
      "epoch": 0.9774647887323944,
      "grad_norm": 0.7044683661504436,
      "learning_rate": 8.529732074826225e-06,
      "loss": 0.6333,
      "step": 347
    },
    {
      "epoch": 0.9802816901408451,
      "grad_norm": 0.6550673857067976,
      "learning_rate": 8.518099959274751e-06,
      "loss": 0.6141,
      "step": 348
    },
    {
      "epoch": 0.9830985915492958,
      "grad_norm": 0.6811939354097548,
      "learning_rate": 8.506430010223899e-06,
      "loss": 0.695,
      "step": 349
    },
    {
      "epoch": 0.9859154929577465,
      "grad_norm": 0.7188183598452683,
      "learning_rate": 8.494722353171807e-06,
      "loss": 0.6578,
      "step": 350
    },
{ |
|
"epoch": 0.9887323943661972, |
|
"grad_norm": 0.753120522013002, |
|
"learning_rate": 8.482977114022133e-06, |
|
"loss": 0.719, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.9915492957746479, |
|
"grad_norm": 0.6573052433756366, |
|
"learning_rate": 8.471194419082683e-06, |
|
"loss": 0.6247, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.9943661971830986, |
|
"grad_norm": 0.6972129024473817, |
|
"learning_rate": 8.459374395064066e-06, |
|
"loss": 0.7089, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.9971830985915493, |
|
"grad_norm": 0.6991471510748611, |
|
"learning_rate": 8.447517169078322e-06, |
|
"loss": 0.7019, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.7539991152104814, |
|
"learning_rate": 8.435622868637562e-06, |
|
"loss": 0.6866, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0028169014084507, |
|
"grad_norm": 0.7309316536347265, |
|
"learning_rate": 8.423691621652597e-06, |
|
"loss": 0.6191, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.0056338028169014, |
|
"grad_norm": 0.7327659006791467, |
|
"learning_rate": 8.411723556431555e-06, |
|
"loss": 0.5881, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.008450704225352, |
|
"grad_norm": 0.71963119446251, |
|
"learning_rate": 8.399718801678507e-06, |
|
"loss": 0.5709, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0112676056338028, |
|
"grad_norm": 0.6650645225444921, |
|
"learning_rate": 8.38767748649208e-06, |
|
"loss": 0.5872, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0140845070422535, |
|
"grad_norm": 0.6962048521955931, |
|
"learning_rate": 8.375599740364076e-06, |
|
"loss": 0.6494, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.0169014084507042, |
|
"grad_norm": 0.732308607955144, |
|
"learning_rate": 8.363485693178068e-06, |
|
"loss": 0.6065, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.019718309859155, |
|
"grad_norm": 0.7479553465916133, |
|
"learning_rate": 8.351335475208013e-06, |
|
"loss": 0.5926, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0225352112676056, |
|
"grad_norm": 0.8104642606206092, |
|
"learning_rate": 8.339149217116844e-06, |
|
"loss": 0.6862, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.0253521126760563, |
|
"grad_norm": 0.8307345169299164, |
|
"learning_rate": 8.32692704995507e-06, |
|
"loss": 0.5706, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.028169014084507, |
|
"grad_norm": 0.7979429932796829, |
|
"learning_rate": 8.314669105159363e-06, |
|
"loss": 0.7012, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.0309859154929577, |
|
"grad_norm": 0.7731130165738759, |
|
"learning_rate": 8.302375514551147e-06, |
|
"loss": 0.6017, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.0338028169014084, |
|
"grad_norm": 0.808354828090498, |
|
"learning_rate": 8.29004641033518e-06, |
|
"loss": 0.6374, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.036619718309859, |
|
"grad_norm": 0.9435485675478585, |
|
"learning_rate": 8.277681925098133e-06, |
|
"loss": 0.6787, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.0394366197183098, |
|
"grad_norm": 0.8129322510408442, |
|
"learning_rate": 8.26528219180716e-06, |
|
"loss": 0.5717, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.0422535211267605, |
|
"grad_norm": 0.7952569015637464, |
|
"learning_rate": 8.252847343808473e-06, |
|
"loss": 0.6102, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.0450704225352112, |
|
"grad_norm": 0.7372836570712195, |
|
"learning_rate": 8.240377514825906e-06, |
|
"loss": 0.6064, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.047887323943662, |
|
"grad_norm": 0.700959266674204, |
|
"learning_rate": 8.227872838959478e-06, |
|
"loss": 0.5533, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.0507042253521126, |
|
"grad_norm": 0.7392699999637229, |
|
"learning_rate": 8.215333450683945e-06, |
|
"loss": 0.6194, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.0535211267605633, |
|
"grad_norm": 0.8016719369071104, |
|
"learning_rate": 8.202759484847366e-06, |
|
"loss": 0.6321, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.056338028169014, |
|
"grad_norm": 0.879203616291769, |
|
"learning_rate": 8.19015107666964e-06, |
|
"loss": 0.6167, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.0591549295774647, |
|
"grad_norm": 0.7999750323120932, |
|
"learning_rate": 8.177508361741063e-06, |
|
"loss": 0.61, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.0619718309859154, |
|
"grad_norm": 1.2853051561846651, |
|
"learning_rate": 8.164831476020856e-06, |
|
"loss": 0.7454, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.064788732394366, |
|
"grad_norm": 0.8055398755078872, |
|
"learning_rate": 8.15212055583572e-06, |
|
"loss": 0.6196, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.0676056338028168, |
|
"grad_norm": 0.7446253097717447, |
|
"learning_rate": 8.139375737878356e-06, |
|
"loss": 0.6322, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.0704225352112675, |
|
"grad_norm": 0.8411611220367352, |
|
"learning_rate": 8.126597159206002e-06, |
|
"loss": 0.6148, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.0732394366197182, |
|
"grad_norm": 0.7097418449161869, |
|
"learning_rate": 8.113784957238957e-06, |
|
"loss": 0.5783, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.076056338028169, |
|
"grad_norm": 0.9215534743065565, |
|
"learning_rate": 8.100939269759103e-06, |
|
"loss": 0.636, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.0788732394366196, |
|
"grad_norm": 0.9640442672292225, |
|
"learning_rate": 8.088060234908425e-06, |
|
"loss": 0.6169, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.0816901408450703, |
|
"grad_norm": 0.7562932638842584, |
|
"learning_rate": 8.075147991187521e-06, |
|
"loss": 0.626, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.084507042253521, |
|
"grad_norm": 0.7398497359039935, |
|
"learning_rate": 8.062202677454123e-06, |
|
"loss": 0.6072, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.0873239436619717, |
|
"grad_norm": 0.7834989845435792, |
|
"learning_rate": 8.04922443292159e-06, |
|
"loss": 0.6587, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.0901408450704226, |
|
"grad_norm": 0.8109941310202463, |
|
"learning_rate": 8.036213397157418e-06, |
|
"loss": 0.5767, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.0929577464788733, |
|
"grad_norm": 0.867677796061852, |
|
"learning_rate": 8.02316971008174e-06, |
|
"loss": 0.6495, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.095774647887324, |
|
"grad_norm": 0.8568914788227452, |
|
"learning_rate": 8.01009351196582e-06, |
|
"loss": 0.6608, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.0985915492957747, |
|
"grad_norm": 0.7414767319860525, |
|
"learning_rate": 7.996984943430544e-06, |
|
"loss": 0.6201, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.1014084507042254, |
|
"grad_norm": 0.7712950858852734, |
|
"learning_rate": 7.983844145444908e-06, |
|
"loss": 0.6559, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.1042253521126761, |
|
"grad_norm": 0.7279417190285391, |
|
"learning_rate": 7.970671259324502e-06, |
|
"loss": 0.6579, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.1070422535211268, |
|
"grad_norm": 0.6809679469757443, |
|
"learning_rate": 7.957466426729995e-06, |
|
"loss": 0.5586, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1098591549295775, |
|
"grad_norm": 0.7395336563334385, |
|
"learning_rate": 7.944229789665595e-06, |
|
"loss": 0.6107, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1126760563380282, |
|
"grad_norm": 0.7800851945961862, |
|
"learning_rate": 7.930961490477546e-06, |
|
"loss": 0.6775, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.115492957746479, |
|
"grad_norm": 0.7428766032430452, |
|
"learning_rate": 7.917661671852582e-06, |
|
"loss": 0.5768, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.1183098591549296, |
|
"grad_norm": 0.748113720663839, |
|
"learning_rate": 7.904330476816391e-06, |
|
"loss": 0.6391, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.1211267605633803, |
|
"grad_norm": 0.7602498335490296, |
|
"learning_rate": 7.890968048732091e-06, |
|
"loss": 0.6414, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.123943661971831, |
|
"grad_norm": 0.7233012193214933, |
|
"learning_rate": 7.877574531298666e-06, |
|
"loss": 0.6122, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.1267605633802817, |
|
"grad_norm": 0.7021500909845154, |
|
"learning_rate": 7.864150068549446e-06, |
|
"loss": 0.5922, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.1295774647887324, |
|
"grad_norm": 0.7106078546209356, |
|
"learning_rate": 7.850694804850538e-06, |
|
"loss": 0.6049, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.1323943661971831, |
|
"grad_norm": 0.7065107396130941, |
|
"learning_rate": 7.837208884899283e-06, |
|
"loss": 0.5874, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.1352112676056338, |
|
"grad_norm": 0.7102875370949132, |
|
"learning_rate": 7.823692453722701e-06, |
|
"loss": 0.6108, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.1380281690140845, |
|
"grad_norm": 0.7912610129085115, |
|
"learning_rate": 7.810145656675923e-06, |
|
"loss": 0.604, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.1408450704225352, |
|
"grad_norm": 0.737281408366629, |
|
"learning_rate": 7.796568639440635e-06, |
|
"loss": 0.5719, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.143661971830986, |
|
"grad_norm": 0.7670922935606069, |
|
"learning_rate": 7.782961548023515e-06, |
|
"loss": 0.5963, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.1464788732394366, |
|
"grad_norm": 0.7641025205212992, |
|
"learning_rate": 7.769324528754653e-06, |
|
"loss": 0.6672, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.1492957746478873, |
|
"grad_norm": 0.7823576553904255, |
|
"learning_rate": 7.755657728285979e-06, |
|
"loss": 0.6518, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.152112676056338, |
|
"grad_norm": 0.768517790974834, |
|
"learning_rate": 7.741961293589693e-06, |
|
"loss": 0.5925, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.1549295774647887, |
|
"grad_norm": 0.8178224875850133, |
|
"learning_rate": 7.728235371956678e-06, |
|
"loss": 0.6393, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.1577464788732394, |
|
"grad_norm": 0.7593574451798548, |
|
"learning_rate": 7.714480110994922e-06, |
|
"loss": 0.5932, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.1605633802816901, |
|
"grad_norm": 0.816042257929934, |
|
"learning_rate": 7.700695658627924e-06, |
|
"loss": 0.6303, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.1633802816901408, |
|
"grad_norm": 0.7790772874456501, |
|
"learning_rate": 7.686882163093106e-06, |
|
"loss": 0.6387, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.1661971830985915, |
|
"grad_norm": 0.759549985343405, |
|
"learning_rate": 7.673039772940218e-06, |
|
"loss": 0.6733, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.1690140845070423, |
|
"grad_norm": 0.8435272915696094, |
|
"learning_rate": 7.659168637029746e-06, |
|
"loss": 0.7193, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.171830985915493, |
|
"grad_norm": 0.6766200231780173, |
|
"learning_rate": 7.6452689045313e-06, |
|
"loss": 0.5821, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.1746478873239437, |
|
"grad_norm": 0.7660868612697938, |
|
"learning_rate": 7.631340724922023e-06, |
|
"loss": 0.6028, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.1774647887323944, |
|
"grad_norm": 0.7708935275497972, |
|
"learning_rate": 7.617384247984973e-06, |
|
"loss": 0.6066, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.180281690140845, |
|
"grad_norm": 0.7188801905334284, |
|
"learning_rate": 7.603399623807519e-06, |
|
"loss": 0.6452, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.1830985915492958, |
|
"grad_norm": 0.6894638678128713, |
|
"learning_rate": 7.589387002779722e-06, |
|
"loss": 0.5232, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.1859154929577465, |
|
"grad_norm": 0.7428721573686932, |
|
"learning_rate": 7.575346535592721e-06, |
|
"loss": 0.5196, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.1887323943661972, |
|
"grad_norm": 0.7167711000069285, |
|
"learning_rate": 7.561278373237108e-06, |
|
"loss": 0.5482, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.1915492957746479, |
|
"grad_norm": 0.755690004522166, |
|
"learning_rate": 7.547182667001316e-06, |
|
"loss": 0.6087, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.1943661971830986, |
|
"grad_norm": 0.7231715291470082, |
|
"learning_rate": 7.5330595684699735e-06, |
|
"loss": 0.6187, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.1971830985915493, |
|
"grad_norm": 0.7805865030943964, |
|
"learning_rate": 7.5189092295222945e-06, |
|
"loss": 0.5708, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.7398169564046222, |
|
"learning_rate": 7.504731802330427e-06, |
|
"loss": 0.6753, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.2028169014084507, |
|
"grad_norm": 0.7283567187045813, |
|
"learning_rate": 7.49052743935783e-06, |
|
"loss": 0.6255, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.2056338028169014, |
|
"grad_norm": 0.7503448710021737, |
|
"learning_rate": 7.476296293357626e-06, |
|
"loss": 0.6657, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.208450704225352, |
|
"grad_norm": 0.7434547390985952, |
|
"learning_rate": 7.462038517370962e-06, |
|
"loss": 0.69, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.2112676056338028, |
|
"grad_norm": 0.904002334343554, |
|
"learning_rate": 7.4477542647253645e-06, |
|
"loss": 0.7437, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.2140845070422535, |
|
"grad_norm": 0.6929989080088196, |
|
"learning_rate": 7.4334436890330845e-06, |
|
"loss": 0.5423, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.2169014084507042, |
|
"grad_norm": 0.7444154551200957, |
|
"learning_rate": 7.4191069441894555e-06, |
|
"loss": 0.5752, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.2197183098591549, |
|
"grad_norm": 0.8200086939143616, |
|
"learning_rate": 7.404744184371229e-06, |
|
"loss": 0.6005, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.2225352112676056, |
|
"grad_norm": 0.7685321740288726, |
|
"learning_rate": 7.39035556403492e-06, |
|
"loss": 0.6407, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.2253521126760563, |
|
"grad_norm": 0.7496357556269256, |
|
"learning_rate": 7.375941237915151e-06, |
|
"loss": 0.6213, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.228169014084507, |
|
"grad_norm": 0.7143102894255702, |
|
"learning_rate": 7.361501361022978e-06, |
|
"loss": 0.6168, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.2309859154929577, |
|
"grad_norm": 0.7309978741831288, |
|
"learning_rate": 7.347036088644232e-06, |
|
"loss": 0.633, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.2338028169014084, |
|
"grad_norm": 0.6905063561288732, |
|
"learning_rate": 7.3325455763378465e-06, |
|
"loss": 0.577, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.236619718309859, |
|
"grad_norm": 0.7022680079706316, |
|
"learning_rate": 7.318029979934181e-06, |
|
"loss": 0.5884, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.2394366197183098, |
|
"grad_norm": 0.7307996255503993, |
|
"learning_rate": 7.303489455533352e-06, |
|
"loss": 0.6065, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.2422535211267607, |
|
"grad_norm": 0.8189596586259464, |
|
"learning_rate": 7.288924159503549e-06, |
|
"loss": 0.6288, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.2450704225352114, |
|
"grad_norm": 0.7319103016752759, |
|
"learning_rate": 7.274334248479353e-06, |
|
"loss": 0.6402, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.247887323943662, |
|
"grad_norm": 0.6812316396156196, |
|
"learning_rate": 7.259719879360054e-06, |
|
"loss": 0.6013, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.2507042253521128, |
|
"grad_norm": 0.7654316061184241, |
|
"learning_rate": 7.2450812093079695e-06, |
|
"loss": 0.6302, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.2535211267605635, |
|
"grad_norm": 0.7768746107857665, |
|
"learning_rate": 7.2304183957467385e-06, |
|
"loss": 0.6225, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.2563380281690142, |
|
"grad_norm": 0.7430991023758472, |
|
"learning_rate": 7.215731596359645e-06, |
|
"loss": 0.6406, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.2591549295774649, |
|
"grad_norm": 0.7625029621832455, |
|
"learning_rate": 7.201020969087913e-06, |
|
"loss": 0.6042, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.2619718309859156, |
|
"grad_norm": 0.7489522138035615, |
|
"learning_rate": 7.18628667212901e-06, |
|
"loss": 0.6033, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.2647887323943663, |
|
"grad_norm": 0.7028238677579237, |
|
"learning_rate": 7.17152886393495e-06, |
|
"loss": 0.5917, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.267605633802817, |
|
"grad_norm": 0.8034495427467496, |
|
"learning_rate": 7.15674770321058e-06, |
|
"loss": 0.572, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.2704225352112677, |
|
"grad_norm": 0.7066497937705736, |
|
"learning_rate": 7.141943348911885e-06, |
|
"loss": 0.6047, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.2732394366197184, |
|
"grad_norm": 0.7798266546615258, |
|
"learning_rate": 7.127115960244269e-06, |
|
"loss": 0.6074, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.276056338028169, |
|
"grad_norm": 0.7383424636778058, |
|
"learning_rate": 7.112265696660848e-06, |
|
"loss": 0.5874, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.2788732394366198, |
|
"grad_norm": 0.7327825003857283, |
|
"learning_rate": 7.0973927178607335e-06, |
|
"loss": 0.5926, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.2816901408450705, |
|
"grad_norm": 0.6688467886867253, |
|
"learning_rate": 7.0824971837873154e-06, |
|
"loss": 0.6032, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.2845070422535212, |
|
"grad_norm": 0.71887769142022, |
|
"learning_rate": 7.067579254626543e-06, |
|
"loss": 0.6137, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.287323943661972, |
|
"grad_norm": 0.7646631375464373, |
|
"learning_rate": 7.0526390908052e-06, |
|
"loss": 0.5687, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.2901408450704226, |
|
"grad_norm": 0.7061537907237097, |
|
"learning_rate": 7.037676852989182e-06, |
|
"loss": 0.5671, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.2929577464788733, |
|
"grad_norm": 0.7702181592631588, |
|
"learning_rate": 7.022692702081766e-06, |
|
"loss": 0.5986, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.295774647887324, |
|
"grad_norm": 0.7539785898071507, |
|
"learning_rate": 7.007686799221882e-06, |
|
"loss": 0.5724, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.2985915492957747, |
|
"grad_norm": 0.800745890577906, |
|
"learning_rate": 6.992659305782381e-06, |
|
"loss": 0.5959, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.3014084507042254, |
|
"grad_norm": 0.7599227930304122, |
|
"learning_rate": 6.977610383368296e-06, |
|
"loss": 0.6145, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.304225352112676, |
|
"grad_norm": 0.7412143690063574, |
|
"learning_rate": 6.9625401938151085e-06, |
|
"loss": 0.5796, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.3070422535211268, |
|
"grad_norm": 0.6670323564590958, |
|
"learning_rate": 6.947448899187004e-06, |
|
"loss": 0.5279, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.3098591549295775, |
|
"grad_norm": 0.7448132221148877, |
|
"learning_rate": 6.932336661775132e-06, |
|
"loss": 0.517, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.3126760563380282, |
|
"grad_norm": 0.7633666757686683, |
|
"learning_rate": 6.91720364409586e-06, |
|
"loss": 0.5525, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.315492957746479, |
|
"grad_norm": 0.7901094664068569, |
|
"learning_rate": 6.902050008889024e-06, |
|
"loss": 0.6333, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.3183098591549296, |
|
"grad_norm": 0.7417299545983219, |
|
"learning_rate": 6.886875919116184e-06, |
|
"loss": 0.6238, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.3211267605633803, |
|
"grad_norm": 0.8396209754400242, |
|
"learning_rate": 6.871681537958862e-06, |
|
"loss": 0.7036, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.323943661971831, |
|
"grad_norm": 0.6793697034990046, |
|
"learning_rate": 6.856467028816797e-06, |
|
"loss": 0.6093, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.3267605633802817, |
|
"grad_norm": 0.717578251040186, |
|
"learning_rate": 6.841232555306181e-06, |
|
"loss": 0.6089, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.3295774647887324, |
|
"grad_norm": 0.7334395938548597, |
|
"learning_rate": 6.825978281257905e-06, |
|
"loss": 0.6266, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.332394366197183, |
|
"grad_norm": 0.7920253133081248, |
|
"learning_rate": 6.810704370715791e-06, |
|
"loss": 0.6245, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.3352112676056338, |
|
"grad_norm": 0.6558719992840684, |
|
"learning_rate": 6.795410987934834e-06, |
|
"loss": 0.5258, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.3380281690140845, |
|
"grad_norm": 0.6753573860958215, |
|
"learning_rate": 6.780098297379427e-06, |
|
"loss": 0.617, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.3408450704225352, |
|
"grad_norm": 0.7579142416021319, |
|
"learning_rate": 6.764766463721605e-06, |
|
"loss": 0.6232, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.343661971830986, |
|
"grad_norm": 0.8426339130047485, |
|
"learning_rate": 6.7494156518392625e-06, |
|
"loss": 0.7301, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.3464788732394366, |
|
"grad_norm": 0.7100296420326655, |
|
"learning_rate": 6.734046026814388e-06, |
|
"loss": 0.5676, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.3492957746478873, |
|
"grad_norm": 0.7140268014097398, |
|
"learning_rate": 6.718657753931284e-06, |
|
"loss": 0.5997, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.352112676056338, |
|
"grad_norm": 0.7157881332867808, |
|
"learning_rate": 6.70325099867479e-06, |
|
"loss": 0.5406, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.3549295774647887, |
|
"grad_norm": 0.7845113582925589, |
|
"learning_rate": 6.687825926728506e-06, |
|
"loss": 0.6069, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.3577464788732394, |
|
"grad_norm": 0.7758617296188998, |
|
"learning_rate": 6.672382703973011e-06, |
|
"loss": 0.6064, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.36056338028169, |
|
"grad_norm": 0.6839736903915518, |
|
"learning_rate": 6.65692149648407e-06, |
|
"loss": 0.5659, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.3633802816901408, |
|
"grad_norm": 0.8033535577918636, |
|
"learning_rate": 6.641442470530866e-06, |
|
"loss": 0.642, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.3661971830985915, |
|
"grad_norm": 0.706908397171264, |
|
"learning_rate": 6.62594579257419e-06, |
|
"loss": 0.5262, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.3690140845070422, |
|
"grad_norm": 0.7942995133323388, |
|
"learning_rate": 6.610431629264669e-06, |
|
"loss": 0.6028, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.371830985915493, |
|
"grad_norm": 0.7737746271917008, |
|
"learning_rate": 6.594900147440963e-06, |
|
"loss": 0.6341, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.3746478873239436, |
|
"grad_norm": 0.7760559302932023, |
|
"learning_rate": 6.579351514127976e-06, |
|
"loss": 0.6546, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.3774647887323943, |
|
"grad_norm": 0.7827297911698344, |
|
"learning_rate": 6.56378589653506e-06, |
|
"loss": 0.6294, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.380281690140845, |
|
"grad_norm": 0.7071080299842922, |
|
"learning_rate": 6.548203462054211e-06, |
|
"loss": 0.601, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.3830985915492957, |
|
"grad_norm": 0.7146539646324296, |
|
"learning_rate": 6.5326043782582785e-06, |
|
"loss": 0.5647, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.3859154929577464, |
|
"grad_norm": 0.8087039401070861, |
|
"learning_rate": 6.516988812899154e-06, |
|
"loss": 0.6269, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.388732394366197, |
|
"grad_norm": 0.6966320976971616, |
|
"learning_rate": 6.501356933905973e-06, |
|
"loss": 0.5688, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.3915492957746478, |
|
"grad_norm": 0.756507465924507, |
|
"learning_rate": 6.485708909383306e-06, |
|
"loss": 0.627, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.3943661971830985, |
|
"grad_norm": 0.6901180254887062, |
|
"learning_rate": 6.4700449076093515e-06, |
|
"loss": 0.6092, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.3971830985915492, |
|
"grad_norm": 0.7803910885700782, |
|
"learning_rate": 6.454365097034127e-06, |
|
"loss": 0.6495, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.7890702362457062, |
|
"learning_rate": 6.43866964627766e-06, |
|
"loss": 0.6578, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.4028169014084506, |
|
"grad_norm": 0.8090766090233742, |
|
"learning_rate": 6.422958724128169e-06, |
|
"loss": 0.6242, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.4056338028169013, |
|
"grad_norm": 0.690851480741414, |
|
"learning_rate": 6.40723249954025e-06, |
|
"loss": 0.6112, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.408450704225352, |
|
"grad_norm": 0.7180545063167437, |
|
"learning_rate": 6.391491141633064e-06, |
|
"loss": 0.6094, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.4112676056338027, |
|
"grad_norm": 0.7761047381057276, |
|
"learning_rate": 6.375734819688514e-06, |
|
"loss": 0.6498, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.4140845070422534, |
|
"grad_norm": 0.7086180360990414, |
|
"learning_rate": 6.359963703149424e-06, |
|
"loss": 0.5708, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.4169014084507041, |
|
"grad_norm": 0.7137027645516033, |
|
"learning_rate": 6.344177961617719e-06, |
|
"loss": 0.6064, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.4197183098591548, |
|
"grad_norm": 0.8064327336753806, |
|
"learning_rate": 6.3283777648526035e-06, |
|
"loss": 0.6317, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.4225352112676055, |
|
"grad_norm": 0.7651646342531121, |
|
"learning_rate": 6.312563282768729e-06, |
|
"loss": 0.6288, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.4253521126760562, |
|
"grad_norm": 0.6721576291974353, |
|
"learning_rate": 6.296734685434373e-06, |
|
"loss": 0.5563, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.428169014084507, |
|
"grad_norm": 0.7300091598809011, |
|
"learning_rate": 6.280892143069607e-06, |
|
"loss": 0.6739, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.4309859154929576, |
|
"grad_norm": 0.7631685066742252, |
|
"learning_rate": 6.265035826044467e-06, |
|
"loss": 0.6372, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.4338028169014083, |
|
"grad_norm": 0.7434045630709879, |
|
"learning_rate": 6.2491659048771215e-06, |
|
"loss": 0.6554, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.436619718309859, |
|
"grad_norm": 0.7167854438600025, |
|
"learning_rate": 6.233282550232036e-06, |
|
"loss": 0.6308, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.43943661971831, |
|
"grad_norm": 0.7054646693964248, |
|
"learning_rate": 6.217385932918141e-06, |
|
"loss": 0.5772, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.4422535211267606, |
|
"grad_norm": 0.7566300729630193, |
|
"learning_rate": 6.201476223886993e-06, |
|
"loss": 0.6576, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.4450704225352113, |
|
"grad_norm": 0.7375259475887378, |
|
"learning_rate": 6.185553594230934e-06, |
|
"loss": 0.6575, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.447887323943662, |
|
"grad_norm": 0.7209828197635741, |
|
"learning_rate": 6.169618215181256e-06, |
|
"loss": 0.5953, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.4507042253521127, |
|
"grad_norm": 0.70996297479465, |
|
"learning_rate": 6.153670258106356e-06, |
|
"loss": 0.5975, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.4535211267605634, |
|
"grad_norm": 0.7185389101707407, |
|
"learning_rate": 6.137709894509898e-06, |
|
"loss": 0.5733, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.4563380281690141, |
|
"grad_norm": 0.7916008805778101, |
|
"learning_rate": 6.121737296028959e-06, |
|
"loss": 0.6608, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.4591549295774648, |
|
"grad_norm": 0.7055343001343649, |
|
"learning_rate": 6.105752634432195e-06, |
|
"loss": 0.6563, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.4619718309859155, |
|
"grad_norm": 0.6662761212311629, |
|
"learning_rate": 6.089756081617987e-06, |
|
"loss": 0.5828, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.4647887323943662, |
|
"grad_norm": 0.7271893463200743, |
|
"learning_rate": 6.073747809612591e-06, |
|
"loss": 0.6323, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.467605633802817, |
|
"grad_norm": 0.7413143561609734, |
|
"learning_rate": 6.057727990568294e-06, |
|
"loss": 0.6233, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.4704225352112676, |
|
"grad_norm": 0.8068322899404378, |
|
"learning_rate": 6.041696796761558e-06, |
|
"loss": 0.6884, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.4732394366197183, |
|
"grad_norm": 0.76675704278936, |
|
"learning_rate": 6.025654400591167e-06, |
|
"loss": 0.6514, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.476056338028169, |
|
"grad_norm": 0.6978616412021054, |
|
"learning_rate": 6.009600974576378e-06, |
|
"loss": 0.5814, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.4788732394366197, |
|
"grad_norm": 0.7367626943759139, |
|
"learning_rate": 5.9935366913550615e-06, |
|
"loss": 0.61, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.4816901408450704, |
|
"grad_norm": 0.803924245325719, |
|
"learning_rate": 5.977461723681845e-06, |
|
"loss": 0.6201, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.4845070422535211, |
|
"grad_norm": 0.7166646169196906, |
|
"learning_rate": 5.961376244426256e-06, |
|
"loss": 0.5891, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.4873239436619718, |
|
"grad_norm": 0.7830895265196487, |
|
"learning_rate": 5.945280426570862e-06, |
|
"loss": 0.6437, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.4901408450704225, |
|
"grad_norm": 0.7077569282266005, |
|
"learning_rate": 5.929174443209416e-06, |
|
"loss": 0.5941, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.4929577464788732, |
|
"grad_norm": 0.7255908623917363, |
|
"learning_rate": 5.913058467544984e-06, |
|
"loss": 0.6425, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.495774647887324, |
|
"grad_norm": 0.6940217061921031, |
|
"learning_rate": 5.8969326728880964e-06, |
|
"loss": 0.615, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.4985915492957746, |
|
"grad_norm": 0.741185242757091, |
|
"learning_rate": 5.880797232654869e-06, |
|
"loss": 0.6347, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.5014084507042254, |
|
"grad_norm": 0.7501146031536312, |
|
"learning_rate": 5.8646523203651514e-06, |
|
"loss": 0.5906, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.504225352112676, |
|
"grad_norm": 0.7363356259610467, |
|
"learning_rate": 5.848498109640652e-06, |
|
"loss": 0.639, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.5070422535211268, |
|
"grad_norm": 0.7428030589984295, |
|
"learning_rate": 5.832334774203076e-06, |
|
"loss": 0.6342, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.5098591549295775, |
|
"grad_norm": 0.7673144526793103, |
|
"learning_rate": 5.8161624878722545e-06, |
|
"loss": 0.5874, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.5126760563380282, |
|
"grad_norm": 0.7650544824750333, |
|
"learning_rate": 5.799981424564275e-06, |
|
"loss": 0.6566, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.5154929577464789, |
|
"grad_norm": 0.7489586669750536, |
|
"learning_rate": 5.7837917582896145e-06, |
|
"loss": 0.6858, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.5183098591549296, |
|
"grad_norm": 0.7121890274043278, |
|
"learning_rate": 5.767593663151265e-06, |
|
"loss": 0.5805, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.5211267605633803, |
|
"grad_norm": 0.745809225911286, |
|
"learning_rate": 5.751387313342863e-06, |
|
"loss": 0.6756, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.523943661971831, |
|
"grad_norm": 0.7320056881511781, |
|
"learning_rate": 5.735172883146813e-06, |
|
"loss": 0.588, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.5267605633802817, |
|
"grad_norm": 0.7941805383744988, |
|
"learning_rate": 5.718950546932418e-06, |
|
"loss": 0.6574, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.5295774647887324, |
|
"grad_norm": 0.7249778426938815, |
|
"learning_rate": 5.702720479154001e-06, |
|
"loss": 0.636, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.532394366197183, |
|
"grad_norm": 1.0320714729311038, |
|
"learning_rate": 5.686482854349029e-06, |
|
"loss": 0.6839, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.5352112676056338, |
|
"grad_norm": 0.7329898223205343, |
|
"learning_rate": 5.6702378471362394e-06, |
|
"loss": 0.5909, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.5380281690140845, |
|
"grad_norm": 0.8189575127373967, |
|
"learning_rate": 5.653985632213758e-06, |
|
"loss": 0.6449, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.5408450704225352, |
|
"grad_norm": 0.733175797653608, |
|
"learning_rate": 5.637726384357222e-06, |
|
"loss": 0.613, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.543661971830986, |
|
"grad_norm": 0.8383511722961089, |
|
"learning_rate": 5.621460278417901e-06, |
|
"loss": 0.6849, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.5464788732394368, |
|
"grad_norm": 0.7237452460851425, |
|
"learning_rate": 5.605187489320815e-06, |
|
"loss": 0.6656, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.5492957746478875, |
|
"grad_norm": 0.8208760472428147, |
|
"learning_rate": 5.588908192062858e-06, |
|
"loss": 0.7055, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.5521126760563382, |
|
"grad_norm": 0.8069099779678623, |
|
"learning_rate": 5.572622561710906e-06, |
|
"loss": 0.6956, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.5549295774647889, |
|
"grad_norm": 0.7668293000678549, |
|
"learning_rate": 5.556330773399948e-06, |
|
"loss": 0.5757, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.5577464788732396, |
|
"grad_norm": 0.6893187298629392, |
|
"learning_rate": 5.54003300233119e-06, |
|
"loss": 0.5799, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.5605633802816903, |
|
"grad_norm": 0.8113423411383069, |
|
"learning_rate": 5.52372942377018e-06, |
|
"loss": 0.6565, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.563380281690141, |
|
"grad_norm": 0.711919002112785, |
|
"learning_rate": 5.507420213044915e-06, |
|
"loss": 0.5681, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.5661971830985917, |
|
"grad_norm": 0.7266032009645466, |
|
"learning_rate": 5.491105545543966e-06, |
|
"loss": 0.5502, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.5690140845070424, |
|
"grad_norm": 0.7199568163075004, |
|
"learning_rate": 5.474785596714581e-06, |
|
"loss": 0.6467, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.571830985915493, |
|
"grad_norm": 0.7273194518859644, |
|
"learning_rate": 5.458460542060807e-06, |
|
"loss": 0.6287, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.5746478873239438, |
|
"grad_norm": 0.7195884793791211, |
|
"learning_rate": 5.442130557141595e-06, |
|
"loss": 0.629, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.5774647887323945, |
|
"grad_norm": 0.7711408798912917, |
|
"learning_rate": 5.42579581756892e-06, |
|
"loss": 0.6274, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.5802816901408452, |
|
"grad_norm": 0.7321868021772925, |
|
"learning_rate": 5.409456499005883e-06, |
|
"loss": 0.6348, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.5830985915492959, |
|
"grad_norm": 1.6749686976248583, |
|
"learning_rate": 5.393112777164834e-06, |
|
"loss": 0.6977, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.5859154929577466, |
|
"grad_norm": 0.739313690634755, |
|
"learning_rate": 5.376764827805468e-06, |
|
"loss": 0.5653, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.5887323943661973, |
|
"grad_norm": 0.6890541921547552, |
|
"learning_rate": 5.36041282673295e-06, |
|
"loss": 0.6276, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.591549295774648, |
|
"grad_norm": 0.7082761122225983, |
|
"learning_rate": 5.3440569497960126e-06, |
|
"loss": 0.5671, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.5943661971830987, |
|
"grad_norm": 0.7540757628476714, |
|
"learning_rate": 5.32769737288507e-06, |
|
"loss": 0.6114, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.5971830985915494, |
|
"grad_norm": 0.7259622552539723, |
|
"learning_rate": 5.3113342719303266e-06, |
|
"loss": 0.5529, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.780097835188365, |
|
"learning_rate": 5.294967822899882e-06, |
|
"loss": 0.6486, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.6028169014084508, |
|
"grad_norm": 0.7692463121508737, |
|
"learning_rate": 5.278598201797844e-06, |
|
"loss": 0.6463, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.6056338028169015, |
|
"grad_norm": 0.7180689651375777, |
|
"learning_rate": 5.262225584662431e-06, |
|
"loss": 0.6551, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.6084507042253522, |
|
"grad_norm": 0.7570672686476732, |
|
"learning_rate": 5.245850147564081e-06, |
|
"loss": 0.6694, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.611267605633803, |
|
"grad_norm": 0.8163493286833097, |
|
"learning_rate": 5.229472066603558e-06, |
|
"loss": 0.71, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.6140845070422536, |
|
"grad_norm": 0.7627020393879351, |
|
"learning_rate": 5.213091517910056e-06, |
|
"loss": 0.6245, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.6169014084507043, |
|
"grad_norm": 0.788407246648653, |
|
"learning_rate": 5.196708677639311e-06, |
|
"loss": 0.623, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.619718309859155, |
|
"grad_norm": 0.7596410758168111, |
|
"learning_rate": 5.180323721971696e-06, |
|
"loss": 0.6165, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.6225352112676057, |
|
"grad_norm": 0.815723603914329, |
|
"learning_rate": 5.163936827110342e-06, |
|
"loss": 0.6754, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.6253521126760564, |
|
"grad_norm": 0.7348793453974766, |
|
"learning_rate": 5.1475481692792235e-06, |
|
"loss": 0.6324, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.628169014084507, |
|
"grad_norm": 0.9082203242567118, |
|
"learning_rate": 5.131157924721285e-06, |
|
"loss": 0.5768, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.6309859154929578, |
|
"grad_norm": 0.7619495779921246, |
|
"learning_rate": 5.1147662696965254e-06, |
|
"loss": 0.6328, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.6338028169014085, |
|
"grad_norm": 0.7410091511557558, |
|
"learning_rate": 5.098373380480114e-06, |
|
"loss": 0.6487, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.6366197183098592, |
|
"grad_norm": 0.7060847762225472, |
|
"learning_rate": 5.081979433360498e-06, |
|
"loss": 0.5852, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.63943661971831, |
|
"grad_norm": 0.7239693134616567, |
|
"learning_rate": 5.065584604637492e-06, |
|
"loss": 0.5882, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.6422535211267606, |
|
"grad_norm": 0.814065239966475, |
|
"learning_rate": 5.0491890706204e-06, |
|
"loss": 0.6785, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.6450704225352113, |
|
"grad_norm": 0.801293226086549, |
|
"learning_rate": 5.0327930076261065e-06, |
|
"loss": 0.6055, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.647887323943662, |
|
"grad_norm": 0.7336660465183735, |
|
"learning_rate": 5.0163965919771855e-06, |
|
"loss": 0.5906, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.6507042253521127, |
|
"grad_norm": 0.7350029739964772, |
|
"learning_rate": 5e-06, |
|
"loss": 0.6355, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.6535211267605634, |
|
"grad_norm": 0.6673037142207796, |
|
"learning_rate": 4.983603408022817e-06, |
|
"loss": 0.5716, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.656338028169014, |
|
"grad_norm": 0.7854183960568444, |
|
"learning_rate": 4.967206992373894e-06, |
|
"loss": 0.6507, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.6591549295774648, |
|
"grad_norm": 0.7320695718469675, |
|
"learning_rate": 4.9508109293796015e-06, |
|
"loss": 0.6632, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.6619718309859155, |
|
"grad_norm": 0.766444319869025, |
|
"learning_rate": 4.9344153953625095e-06, |
|
"loss": 0.6353, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.6647887323943662, |
|
"grad_norm": 0.7832821327166929, |
|
"learning_rate": 4.918020566639505e-06, |
|
"loss": 0.5952, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.667605633802817, |
|
"grad_norm": 0.7342733424551008, |
|
"learning_rate": 4.901626619519888e-06, |
|
"loss": 0.6418, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.6704225352112676, |
|
"grad_norm": 0.7538627392333438, |
|
"learning_rate": 4.885233730303475e-06, |
|
"loss": 0.6097, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.6732394366197183, |
|
"grad_norm": 0.7286411962944247, |
|
"learning_rate": 4.868842075278717e-06, |
|
"loss": 0.5878, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.676056338028169, |
|
"grad_norm": 0.7891328898786883, |
|
"learning_rate": 4.852451830720777e-06, |
|
"loss": 0.5764, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.6788732394366197, |
|
"grad_norm": 0.740444597406536, |
|
"learning_rate": 4.83606317288966e-06, |
|
"loss": 0.585, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.6816901408450704, |
|
"grad_norm": 0.7170855566165547, |
|
"learning_rate": 4.819676278028305e-06, |
|
"loss": 0.5812, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.684507042253521, |
|
"grad_norm": 0.8099731590316525, |
|
"learning_rate": 4.803291322360691e-06, |
|
"loss": 0.658, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.6873239436619718, |
|
"grad_norm": 0.7500012004775211, |
|
"learning_rate": 4.7869084820899455e-06, |
|
"loss": 0.6297, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.6901408450704225, |
|
"grad_norm": 0.6934277195584079, |
|
"learning_rate": 4.7705279333964435e-06, |
|
"loss": 0.6066, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.6929577464788732, |
|
"grad_norm": 0.856885643547699, |
|
"learning_rate": 4.75414985243592e-06, |
|
"loss": 0.6331, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.695774647887324, |
|
"grad_norm": 0.7104053919410432, |
|
"learning_rate": 4.73777441533757e-06, |
|
"loss": 0.5909, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.6985915492957746, |
|
"grad_norm": 0.7271263834548911, |
|
"learning_rate": 4.721401798202157e-06, |
|
"loss": 0.6467, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.7014084507042253, |
|
"grad_norm": 0.8176832728513387, |
|
"learning_rate": 4.70503217710012e-06, |
|
"loss": 0.7502, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.704225352112676, |
|
"grad_norm": 0.709756055906634, |
|
"learning_rate": 4.688665728069676e-06, |
|
"loss": 0.6424, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.7070422535211267, |
|
"grad_norm": 0.7620569104794489, |
|
"learning_rate": 4.67230262711493e-06, |
|
"loss": 0.6172, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.7098591549295774, |
|
"grad_norm": 0.8174091737575135, |
|
"learning_rate": 4.655943050203987e-06, |
|
"loss": 0.5853, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.712676056338028, |
|
"grad_norm": 0.7587649322008173, |
|
"learning_rate": 4.63958717326705e-06, |
|
"loss": 0.6383, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.7154929577464788, |
|
"grad_norm": 0.7302121202735942, |
|
"learning_rate": 4.623235172194532e-06, |
|
"loss": 0.6067, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.7183098591549295, |
|
"grad_norm": 0.752762704354131, |
|
"learning_rate": 4.606887222835168e-06, |
|
"loss": 0.6459, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.7211267605633802, |
|
"grad_norm": 0.797352972135618, |
|
"learning_rate": 4.590543500994118e-06, |
|
"loss": 0.6666, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.723943661971831, |
|
"grad_norm": 0.7325137557683911, |
|
"learning_rate": 4.574204182431082e-06, |
|
"loss": 0.6217, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.7267605633802816, |
|
"grad_norm": 0.7827954004872983, |
|
"learning_rate": 4.557869442858406e-06, |
|
"loss": 0.5668, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.7295774647887323, |
|
"grad_norm": 0.7987182588008852, |
|
"learning_rate": 4.541539457939194e-06, |
|
"loss": 0.5635, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.732394366197183, |
|
"grad_norm": 0.7547789895804167, |
|
"learning_rate": 4.525214403285421e-06, |
|
"loss": 0.6542, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.7352112676056337, |
|
"grad_norm": 0.8107203967195239, |
|
"learning_rate": 4.5088944544560355e-06, |
|
"loss": 0.6506, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.7380281690140844, |
|
"grad_norm": 0.7857568585526324, |
|
"learning_rate": 4.4925797869550865e-06, |
|
"loss": 0.6939, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.7408450704225351, |
|
"grad_norm": 0.6899620281094992, |
|
"learning_rate": 4.476270576229823e-06, |
|
"loss": 0.543, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.7436619718309858, |
|
"grad_norm": 0.7325667426166168, |
|
"learning_rate": 4.459966997668812e-06, |
|
"loss": 0.5554, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.7464788732394365, |
|
"grad_norm": 0.7169652401516716, |
|
"learning_rate": 4.443669226600053e-06, |
|
"loss": 0.5877, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.7492957746478872, |
|
"grad_norm": 0.7707224813831788, |
|
"learning_rate": 4.427377438289095e-06, |
|
"loss": 0.703, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.752112676056338, |
|
"grad_norm": 0.7522757952093085, |
|
"learning_rate": 4.411091807937143e-06, |
|
"loss": 0.6164, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.7549295774647886, |
|
"grad_norm": 0.7773676405964031, |
|
"learning_rate": 4.3948125106791854e-06, |
|
"loss": 0.6613, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.7577464788732393, |
|
"grad_norm": 0.8050938893918458, |
|
"learning_rate": 4.3785397215821e-06, |
|
"loss": 0.6181, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.76056338028169, |
|
"grad_norm": 0.9245792115348718, |
|
"learning_rate": 4.362273615642779e-06, |
|
"loss": 0.6195, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.7633802816901407, |
|
"grad_norm": 0.696350239262708, |
|
"learning_rate": 4.346014367786243e-06, |
|
"loss": 0.5982, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.7661971830985914, |
|
"grad_norm": 0.7722146841214672, |
|
"learning_rate": 4.329762152863762e-06, |
|
"loss": 0.661, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.7690140845070421, |
|
"grad_norm": 0.7713052523645538, |
|
"learning_rate": 4.313517145650973e-06, |
|
"loss": 0.5939, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.7718309859154928, |
|
"grad_norm": 0.8249377635295999, |
|
"learning_rate": 4.297279520846002e-06, |
|
"loss": 0.6603, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.7746478873239435, |
|
"grad_norm": 0.8847154867126514, |
|
"learning_rate": 4.281049453067584e-06, |
|
"loss": 0.5965, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.7774647887323942, |
|
"grad_norm": 0.7325092684105734, |
|
"learning_rate": 4.264827116853189e-06, |
|
"loss": 0.6029, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.780281690140845, |
|
"grad_norm": 0.7541904711913734, |
|
"learning_rate": 4.248612686657139e-06, |
|
"loss": 0.6091, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.7830985915492956, |
|
"grad_norm": 0.7433264124898937, |
|
"learning_rate": 4.232406336848734e-06, |
|
"loss": 0.6022, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.7859154929577463, |
|
"grad_norm": 0.7911613649574278, |
|
"learning_rate": 4.2162082417103855e-06, |
|
"loss": 0.6319, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.788732394366197, |
|
"grad_norm": 0.6962140364428401, |
|
"learning_rate": 4.200018575435726e-06, |
|
"loss": 0.6338, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.7915492957746477, |
|
"grad_norm": 0.8681521481970501, |
|
"learning_rate": 4.183837512127747e-06, |
|
"loss": 0.649, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.7943661971830986, |
|
"grad_norm": 0.64770897691298, |
|
"learning_rate": 4.167665225796925e-06, |
|
"loss": 0.56, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.7971830985915493, |
|
"grad_norm": 0.7494952905347187, |
|
"learning_rate": 4.1515018903593485e-06, |
|
"loss": 0.6356, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.7238165155027775, |
|
"learning_rate": 4.135347679634849e-06, |
|
"loss": 0.6387, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.8028169014084507, |
|
"grad_norm": 0.7785451847003492, |
|
"learning_rate": 4.119202767345132e-06, |
|
"loss": 0.6425, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.8056338028169014, |
|
"grad_norm": 0.7697499616316977, |
|
"learning_rate": 4.103067327111905e-06, |
|
"loss": 0.6415, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.8084507042253521, |
|
"grad_norm": 0.8706556510248158, |
|
"learning_rate": 4.086941532455017e-06, |
|
"loss": 0.6575, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.8112676056338028, |
|
"grad_norm": 0.7643847624137132, |
|
"learning_rate": 4.070825556790587e-06, |
|
"loss": 0.6373, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.8140845070422535, |
|
"grad_norm": 0.7000485049052172, |
|
"learning_rate": 4.05471957342914e-06, |
|
"loss": 0.6102, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.8169014084507042, |
|
"grad_norm": 0.7006331724829398, |
|
"learning_rate": 4.0386237555737476e-06, |
|
"loss": 0.6893, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.819718309859155, |
|
"grad_norm": 0.7135091596231999, |
|
"learning_rate": 4.022538276318156e-06, |
|
"loss": 0.5599, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.8225352112676056, |
|
"grad_norm": 0.7186120810598129, |
|
"learning_rate": 4.006463308644939e-06, |
|
"loss": 0.6411, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.8253521126760563, |
|
"grad_norm": 0.72090386197526, |
|
"learning_rate": 3.990399025423622e-06, |
|
"loss": 0.5957, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.828169014084507, |
|
"grad_norm": 0.7324797526249632, |
|
"learning_rate": 3.974345599408833e-06, |
|
"loss": 0.6214, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.8309859154929577, |
|
"grad_norm": 0.7152179493525903, |
|
"learning_rate": 3.958303203238443e-06, |
|
"loss": 0.5994, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.8338028169014085, |
|
"grad_norm": 0.6721169614227414, |
|
"learning_rate": 3.942272009431707e-06, |
|
"loss": 0.5858, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.8366197183098592, |
|
"grad_norm": 0.7024310468965755, |
|
"learning_rate": 3.92625219038741e-06, |
|
"loss": 0.6533, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.8394366197183099, |
|
"grad_norm": 0.6777544089367332, |
|
"learning_rate": 3.910243918382015e-06, |
|
"loss": 0.5755, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.8422535211267606, |
|
"grad_norm": 0.7630416452716073, |
|
"learning_rate": 3.894247365567806e-06, |
|
"loss": 0.6498, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.8450704225352113, |
|
"grad_norm": 0.7242343956415039, |
|
"learning_rate": 3.878262703971043e-06, |
|
"loss": 0.6183, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.847887323943662, |
|
"grad_norm": 0.7546009817570479, |
|
"learning_rate": 3.8622901054901045e-06, |
|
"loss": 0.5648, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.8507042253521127, |
|
"grad_norm": 0.710778044894514, |
|
"learning_rate": 3.846329741893646e-06, |
|
"loss": 0.5615, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.8535211267605634, |
|
"grad_norm": 0.7241653075763682, |
|
"learning_rate": 3.830381784818746e-06, |
|
"loss": 0.5854, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.856338028169014, |
|
"grad_norm": 0.7251045277047325, |
|
"learning_rate": 3.814446405769069e-06, |
|
"loss": 0.6521, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.8591549295774648, |
|
"grad_norm": 0.7617065909179885, |
|
"learning_rate": 3.7985237761130077e-06, |
|
"loss": 0.6095, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.8619718309859155, |
|
"grad_norm": 0.7393276040780876, |
|
"learning_rate": 3.7826140670818597e-06, |
|
"loss": 0.6011, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.8647887323943662, |
|
"grad_norm": 0.8106406503473126, |
|
"learning_rate": 3.766717449767965e-06, |
|
"loss": 0.5952, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.8676056338028169, |
|
"grad_norm": 0.7306467838266344, |
|
"learning_rate": 3.75083409512288e-06, |
|
"loss": 0.5472, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.8704225352112676, |
|
"grad_norm": 0.7717881120947158, |
|
"learning_rate": 3.7349641739555342e-06, |
|
"loss": 0.6911, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.8732394366197183, |
|
"grad_norm": 0.7245110838917453, |
|
"learning_rate": 3.719107856930395e-06, |
|
"loss": 0.5893, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.8760563380281692, |
|
"grad_norm": 0.7023053711110873, |
|
"learning_rate": 3.703265314565629e-06, |
|
"loss": 0.607, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.8788732394366199, |
|
"grad_norm": 0.7369624598798884, |
|
"learning_rate": 3.687436717231273e-06, |
|
"loss": 0.6088, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.8816901408450706, |
|
"grad_norm": 0.761769778505356, |
|
"learning_rate": 3.6716222351473986e-06, |
|
"loss": 0.6301, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.8845070422535213, |
|
"grad_norm": 0.7269704420043447, |
|
"learning_rate": 3.6558220383822824e-06, |
|
"loss": 0.617, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.887323943661972, |
|
"grad_norm": 0.7842641739090692, |
|
"learning_rate": 3.6400362968505776e-06, |
|
"loss": 0.5754, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.8901408450704227, |
|
"grad_norm": 0.7090787277322763, |
|
"learning_rate": 3.6242651803114876e-06, |
|
"loss": 0.5643, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.8929577464788734, |
|
"grad_norm": 0.8121920668052401, |
|
"learning_rate": 3.6085088583669368e-06, |
|
"loss": 0.6903, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.895774647887324, |
|
"grad_norm": 0.7887632376046526, |
|
"learning_rate": 3.59276750045975e-06, |
|
"loss": 0.6718, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.8985915492957748, |
|
"grad_norm": 0.7626415513119691, |
|
"learning_rate": 3.5770412758718317e-06, |
|
"loss": 0.6656, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.9014084507042255, |
|
"grad_norm": 0.697905106081836, |
|
"learning_rate": 3.56133035372234e-06, |
|
"loss": 0.623, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.9042253521126762, |
|
"grad_norm": 0.7531071089185666, |
|
"learning_rate": 3.545634902965873e-06, |
|
"loss": 0.592, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.9070422535211269, |
|
"grad_norm": 0.7524927476418592, |
|
"learning_rate": 3.52995509239065e-06, |
|
"loss": 0.6335, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.9098591549295776, |
|
"grad_norm": 0.804247562794407, |
|
"learning_rate": 3.514291090616696e-06, |
|
"loss": 0.6692, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.9126760563380283, |
|
"grad_norm": 0.7610688517320657, |
|
"learning_rate": 3.4986430660940283e-06, |
|
"loss": 0.5432, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.915492957746479, |
|
"grad_norm": 0.8240469545199084, |
|
"learning_rate": 3.483011187100847e-06, |
|
"loss": 0.7389, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.9183098591549297, |
|
"grad_norm": 0.789740332756857, |
|
"learning_rate": 3.4673956217417228e-06, |
|
"loss": 0.7044, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.9211267605633804, |
|
"grad_norm": 0.6593003096580221, |
|
"learning_rate": 3.451796537945791e-06, |
|
"loss": 0.5569, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.923943661971831, |
|
"grad_norm": 0.6735471334380218, |
|
"learning_rate": 3.4362141034649434e-06, |
|
"loss": 0.5834, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.9267605633802818, |
|
"grad_norm": 0.7845189104147666, |
|
"learning_rate": 3.4206484858720267e-06, |
|
"loss": 0.6282, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.9295774647887325, |
|
"grad_norm": 0.7895331554828882, |
|
"learning_rate": 3.4050998525590406e-06, |
|
"loss": 0.5515, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.9323943661971832, |
|
"grad_norm": 0.7601388876825378, |
|
"learning_rate": 3.389568370735332e-06, |
|
"loss": 0.6403, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.935211267605634, |
|
"grad_norm": 0.7414839816387602, |
|
"learning_rate": 3.374054207425811e-06, |
|
"loss": 0.5768, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.9380281690140846, |
|
"grad_norm": 0.727586108347313, |
|
"learning_rate": 3.3585575294691355e-06, |
|
"loss": 0.5856, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.9408450704225353, |
|
"grad_norm": 0.7446540281803862, |
|
"learning_rate": 3.3430785035159297e-06, |
|
"loss": 0.6374, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.943661971830986, |
|
"grad_norm": 0.7017790257080906, |
|
"learning_rate": 3.327617296026991e-06, |
|
"loss": 0.6344, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.9464788732394367, |
|
"grad_norm": 0.8243401164290531, |
|
"learning_rate": 3.3121740732714953e-06, |
|
"loss": 0.6616, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.9492957746478874, |
|
"grad_norm": 0.7026711381099482, |
|
"learning_rate": 3.296749001325212e-06, |
|
"loss": 0.5737, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.952112676056338, |
|
"grad_norm": 0.689713511673827, |
|
"learning_rate": 3.2813422460687176e-06, |
|
"loss": 0.573, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.9549295774647888, |
|
"grad_norm": 0.6772697983969495, |
|
"learning_rate": 3.265953973185613e-06, |
|
"loss": 0.5346, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.9577464788732395, |
|
"grad_norm": 0.7092702136331567, |
|
"learning_rate": 3.250584348160738e-06, |
|
"loss": 0.6034, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.9605633802816902, |
|
"grad_norm": 0.7683018287013302, |
|
"learning_rate": 3.235233536278396e-06, |
|
"loss": 0.6051, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.963380281690141, |
|
"grad_norm": 0.7173404212464849, |
|
"learning_rate": 3.2199017026205744e-06, |
|
"loss": 0.6139, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.9661971830985916, |
|
"grad_norm": 0.781035997747711, |
|
"learning_rate": 3.204589012065168e-06, |
|
"loss": 0.6684, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.9690140845070423, |
|
"grad_norm": 0.6958474566614261, |
|
"learning_rate": 3.1892956292842103e-06, |
|
"loss": 0.627, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.971830985915493, |
|
"grad_norm": 0.740337029417686, |
|
"learning_rate": 3.1740217187420947e-06, |
|
"loss": 0.6597, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.9746478873239437, |
|
"grad_norm": 0.6847378780702223, |
|
"learning_rate": 3.158767444693819e-06, |
|
"loss": 0.6148, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.9774647887323944, |
|
"grad_norm": 0.715606992756826, |
|
"learning_rate": 3.1435329711832042e-06, |
|
"loss": 0.5912, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.980281690140845, |
|
"grad_norm": 0.6889429557937115, |
|
"learning_rate": 3.1283184620411387e-06, |
|
"loss": 0.5801, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.9830985915492958, |
|
"grad_norm": 0.7439465480762761, |
|
"learning_rate": 3.1131240808838175e-06, |
|
"loss": 0.6253, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.9859154929577465, |
|
"grad_norm": 0.7406110365742725, |
|
"learning_rate": 3.0979499911109768e-06, |
|
"loss": 0.623, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.9887323943661972, |
|
"grad_norm": 0.7448811885473077, |
|
"learning_rate": 3.082796355904142e-06, |
|
"loss": 0.5771, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.991549295774648, |
|
"grad_norm": 0.7286503648205558, |
|
"learning_rate": 3.06766333822487e-06, |
|
"loss": 0.5972, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.9943661971830986, |
|
"grad_norm": 0.7567721937576899, |
|
"learning_rate": 3.052551100812998e-06, |
|
"loss": 0.6863, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.9971830985915493, |
|
"grad_norm": 0.6846994910878873, |
|
"learning_rate": 3.0374598061848936e-06, |
|
"loss": 0.5628, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.8874539697229774, |
|
"learning_rate": 3.022389616631706e-06, |
|
"loss": 0.6157, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.0028169014084507, |
|
"grad_norm": 2.569736685755078, |
|
"learning_rate": 3.0073406942176214e-06, |
|
"loss": 0.7215, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.0056338028169014, |
|
"grad_norm": 0.781170783791145, |
|
"learning_rate": 2.9923132007781206e-06, |
|
"loss": 0.5392, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.008450704225352, |
|
"grad_norm": 0.7323283178454171, |
|
"learning_rate": 2.977307297918235e-06, |
|
"loss": 0.5802, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 2.011267605633803, |
|
"grad_norm": 0.7082378256520369, |
|
"learning_rate": 2.9623231470108194e-06, |
|
"loss": 0.5261, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.0140845070422535, |
|
"grad_norm": 0.7404544323321716, |
|
"learning_rate": 2.947360909194801e-06, |
|
"loss": 0.5326, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.016901408450704, |
|
"grad_norm": 0.8332008982216366, |
|
"learning_rate": 2.9324207453734575e-06, |
|
"loss": 0.6257, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.019718309859155, |
|
"grad_norm": 0.7727175457086233, |
|
"learning_rate": 2.917502816212685e-06, |
|
"loss": 0.6265, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.0225352112676056, |
|
"grad_norm": 0.6705214828437596, |
|
"learning_rate": 2.902607282139267e-06, |
|
"loss": 0.4832, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.0253521126760563, |
|
"grad_norm": 0.745820194371554, |
|
"learning_rate": 2.8877343033391523e-06, |
|
"loss": 0.5358, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.028169014084507, |
|
"grad_norm": 0.6712880950400633, |
|
"learning_rate": 2.8728840397557324e-06, |
|
"loss": 0.4593, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.0309859154929577, |
|
"grad_norm": 0.7234741402033795, |
|
"learning_rate": 2.8580566510881158e-06, |
|
"loss": 0.5738, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.0338028169014084, |
|
"grad_norm": 0.7871830801210608, |
|
"learning_rate": 2.8432522967894217e-06, |
|
"loss": 0.5577, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.036619718309859, |
|
"grad_norm": 0.7623729887137076, |
|
"learning_rate": 2.8284711360650517e-06, |
|
"loss": 0.5423, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.03943661971831, |
|
"grad_norm": 0.7654196844987124, |
|
"learning_rate": 2.8137133278709917e-06, |
|
"loss": 0.5362, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.0422535211267605, |
|
"grad_norm": 0.7890889396144615, |
|
"learning_rate": 2.7989790309120895e-06, |
|
"loss": 0.5604, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.045070422535211, |
|
"grad_norm": 0.8280092947316886, |
|
"learning_rate": 2.7842684036403557e-06, |
|
"loss": 0.5466, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.047887323943662, |
|
"grad_norm": 0.7384511444156504, |
|
"learning_rate": 2.769581604253262e-06, |
|
"loss": 0.5797, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.0507042253521126, |
|
"grad_norm": 0.7594972574065925, |
|
"learning_rate": 2.754918790692031e-06, |
|
"loss": 0.5416, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.0535211267605633, |
|
"grad_norm": 0.8190052185681062, |
|
"learning_rate": 2.7402801206399454e-06, |
|
"loss": 0.594, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.056338028169014, |
|
"grad_norm": 0.8089065343356978, |
|
"learning_rate": 2.7256657515206487e-06, |
|
"loss": 0.6249, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.0591549295774647, |
|
"grad_norm": 0.6795375301061102, |
|
"learning_rate": 2.7110758404964534e-06, |
|
"loss": 0.5078, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.0619718309859154, |
|
"grad_norm": 0.7199747851182974, |
|
"learning_rate": 2.6965105444666496e-06, |
|
"loss": 0.5128, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.064788732394366, |
|
"grad_norm": 0.7389839114423702, |
|
"learning_rate": 2.6819700200658204e-06, |
|
"loss": 0.5165, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.067605633802817, |
|
"grad_norm": 0.7113982746437169, |
|
"learning_rate": 2.667454423662156e-06, |
|
"loss": 0.5283, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.0704225352112675, |
|
"grad_norm": 0.8019854200692348, |
|
"learning_rate": 2.6529639113557694e-06, |
|
"loss": 0.5899, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.073239436619718, |
|
"grad_norm": 0.7470761591455513, |
|
"learning_rate": 2.638498638977024e-06, |
|
"loss": 0.4994, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.076056338028169, |
|
"grad_norm": 0.7463273814271424, |
|
"learning_rate": 2.6240587620848512e-06, |
|
"loss": 0.5425, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.0788732394366196, |
|
"grad_norm": 0.7720000642247179, |
|
"learning_rate": 2.6096444359650817e-06, |
|
"loss": 0.5621, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.0816901408450703, |
|
"grad_norm": 0.7204053665092582, |
|
"learning_rate": 2.595255815628774e-06, |
|
"loss": 0.5023, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.084507042253521, |
|
"grad_norm": 0.7566667785442299, |
|
"learning_rate": 2.580893055810545e-06, |
|
"loss": 0.5514, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.0873239436619717, |
|
"grad_norm": 0.6809427689588321, |
|
"learning_rate": 2.5665563109669155e-06, |
|
"loss": 0.5008, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.0901408450704224, |
|
"grad_norm": 0.810140586801637, |
|
"learning_rate": 2.5522457352746368e-06, |
|
"loss": 0.5852, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.092957746478873, |
|
"grad_norm": 0.7584702735377782, |
|
"learning_rate": 2.5379614826290384e-06, |
|
"loss": 0.5008, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.095774647887324, |
|
"grad_norm": 0.7871575310769966, |
|
"learning_rate": 2.5237037066423747e-06, |
|
"loss": 0.6055, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.0985915492957745, |
|
"grad_norm": 0.7431957816566821, |
|
"learning_rate": 2.509472560642171e-06, |
|
"loss": 0.579, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.101408450704225, |
|
"grad_norm": 0.74509594480476, |
|
"learning_rate": 2.495268197669573e-06, |
|
"loss": 0.5521, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.104225352112676, |
|
"grad_norm": 0.7603284772487195, |
|
"learning_rate": 2.481090770477706e-06, |
|
"loss": 0.5488, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.1070422535211266, |
|
"grad_norm": 0.7387452720309511, |
|
"learning_rate": 2.466940431530026e-06, |
|
"loss": 0.5107, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.1098591549295773, |
|
"grad_norm": 0.774018311134093, |
|
"learning_rate": 2.4528173329986855e-06, |
|
"loss": 0.5619, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.112676056338028, |
|
"grad_norm": 0.7094138769934575, |
|
"learning_rate": 2.438721626762892e-06, |
|
"loss": 0.5249, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.1154929577464787, |
|
"grad_norm": 0.7857734566073393, |
|
"learning_rate": 2.424653464407281e-06, |
|
"loss": 0.5783, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.1183098591549294, |
|
"grad_norm": 0.6681431433578359, |
|
"learning_rate": 2.4106129972202793e-06, |
|
"loss": 0.4836, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.12112676056338, |
|
"grad_norm": 0.7059928071944473, |
|
"learning_rate": 2.3966003761924816e-06, |
|
"loss": 0.4662, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.123943661971831, |
|
"grad_norm": 0.7193801912300508, |
|
"learning_rate": 2.382615752015028e-06, |
|
"loss": 0.5193, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.1267605633802815, |
|
"grad_norm": 0.7102845448999637, |
|
"learning_rate": 2.3686592750779788e-06, |
|
"loss": 0.5067, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.129577464788732, |
|
"grad_norm": 0.7427634111028115, |
|
"learning_rate": 2.3547310954687018e-06, |
|
"loss": 0.6256, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.132394366197183, |
|
"grad_norm": 0.6979392039426204, |
|
"learning_rate": 2.340831362970257e-06, |
|
"loss": 0.5096, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.1352112676056336, |
|
"grad_norm": 0.7444797928330332, |
|
"learning_rate": 2.326960227059784e-06, |
|
"loss": 0.5149, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.1380281690140843, |
|
"grad_norm": 0.7150529701936503, |
|
"learning_rate": 2.313117836906897e-06, |
|
"loss": 0.5426, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.140845070422535, |
|
"grad_norm": 0.8010874698588896, |
|
"learning_rate": 2.2993043413720784e-06, |
|
"loss": 0.5932, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.1436619718309857, |
|
"grad_norm": 0.6872563327495382, |
|
"learning_rate": 2.28551988900508e-06, |
|
"loss": 0.5149, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.1464788732394364, |
|
"grad_norm": 0.7460312856986838, |
|
"learning_rate": 2.271764628043324e-06, |
|
"loss": 0.55, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.149295774647887, |
|
"grad_norm": 0.7242016051510175, |
|
"learning_rate": 2.258038706410311e-06, |
|
"loss": 0.5533, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.152112676056338, |
|
"grad_norm": 0.7495937718335975, |
|
"learning_rate": 2.2443422717140246e-06, |
|
"loss": 0.566, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.1549295774647885, |
|
"grad_norm": 0.8090509188921273, |
|
"learning_rate": 2.2306754712453504e-06, |
|
"loss": 0.6159, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.1577464788732392, |
|
"grad_norm": 0.7296358155167403, |
|
"learning_rate": 2.217038451976485e-06, |
|
"loss": 0.5482, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.1605633802816904, |
|
"grad_norm": 0.728917758383568, |
|
"learning_rate": 2.203431360559365e-06, |
|
"loss": 0.5387, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.1633802816901406, |
|
"grad_norm": 0.7271467482036231, |
|
"learning_rate": 2.1898543433240787e-06, |
|
"loss": 0.5074, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.1661971830985918, |
|
"grad_norm": 0.7197770997886006, |
|
"learning_rate": 2.1763075462773002e-06, |
|
"loss": 0.5288, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.169014084507042, |
|
"grad_norm": 0.7238841621409884, |
|
"learning_rate": 2.1627911151007176e-06, |
|
"loss": 0.5436, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.171830985915493, |
|
"grad_norm": 0.7568866047442696, |
|
"learning_rate": 2.149305195149463e-06, |
|
"loss": 0.5536, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.1746478873239434, |
|
"grad_norm": 0.765821236668226, |
|
"learning_rate": 2.135849931450555e-06, |
|
"loss": 0.5557, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.1774647887323946, |
|
"grad_norm": 0.7794966716146099, |
|
"learning_rate": 2.1224254687013347e-06, |
|
"loss": 0.5536, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.1802816901408453, |
|
"grad_norm": 0.7525735089142571, |
|
"learning_rate": 2.1090319512679115e-06, |
|
"loss": 0.511, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.183098591549296, |
|
"grad_norm": 0.7288945530927061, |
|
"learning_rate": 2.0956695231836094e-06, |
|
"loss": 0.5095, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.1859154929577467, |
|
"grad_norm": 0.81522934972113, |
|
"learning_rate": 2.0823383281474202e-06, |
|
"loss": 0.5456, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.1887323943661974, |
|
"grad_norm": 0.8161254562914294, |
|
"learning_rate": 2.0690385095224557e-06, |
|
"loss": 0.5454, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.191549295774648, |
|
"grad_norm": 0.7453811173652941, |
|
"learning_rate": 2.0557702103344078e-06, |
|
"loss": 0.5445, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.1943661971830988, |
|
"grad_norm": 0.6574151968727797, |
|
"learning_rate": 2.0425335732700075e-06, |
|
"loss": 0.492, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.1971830985915495, |
|
"grad_norm": 0.720899834077065, |
|
"learning_rate": 2.0293287406754976e-06, |
|
"loss": 0.4851, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.6987996403755788, |
|
"learning_rate": 2.0161558545550925e-06, |
|
"loss": 0.5257, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.202816901408451, |
|
"grad_norm": 0.7902727538260627, |
|
"learning_rate": 2.0030150565694566e-06, |
|
"loss": 0.5831, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.2056338028169016, |
|
"grad_norm": 0.786480042851265, |
|
"learning_rate": 1.989906488034181e-06, |
|
"loss": 0.5464, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.2084507042253523, |
|
"grad_norm": 0.7459476013930395, |
|
"learning_rate": 1.976830289918261e-06, |
|
"loss": 0.5339, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.211267605633803, |
|
"grad_norm": 0.7899232013030847, |
|
"learning_rate": 1.9637866028425835e-06, |
|
"loss": 0.5228, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.2140845070422537, |
|
"grad_norm": 0.7298589604614047, |
|
"learning_rate": 1.950775567078411e-06, |
|
"loss": 0.5464, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.2169014084507044, |
|
"grad_norm": 0.7390448234363022, |
|
"learning_rate": 1.9377973225458773e-06, |
|
"loss": 0.6104, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.219718309859155, |
|
"grad_norm": 0.798187214794037, |
|
"learning_rate": 1.924852008812479e-06, |
|
"loss": 0.5742, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.222535211267606, |
|
"grad_norm": 0.705513153122921, |
|
"learning_rate": 1.9119397650915774e-06, |
|
"loss": 0.4954, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.2253521126760565, |
|
"grad_norm": 0.7803490529105689, |
|
"learning_rate": 1.8990607302408991e-06, |
|
"loss": 0.5839, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.228169014084507, |
|
"grad_norm": 0.772361187559715, |
|
"learning_rate": 1.8862150427610453e-06, |
|
"loss": 0.468, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.230985915492958, |
|
"grad_norm": 0.6941194804936315, |
|
"learning_rate": 1.8734028407940003e-06, |
|
"loss": 0.5351, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.2338028169014086, |
|
"grad_norm": 0.7124075210532292, |
|
"learning_rate": 1.8606242621216443e-06, |
|
"loss": 0.5628, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.2366197183098593, |
|
"grad_norm": 0.7702429591736301, |
|
"learning_rate": 1.84787944416428e-06, |
|
"loss": 0.4947, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.23943661971831, |
|
"grad_norm": 0.7404741820420859, |
|
"learning_rate": 1.835168523979144e-06, |
|
"loss": 0.54, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.2422535211267607, |
|
"grad_norm": 0.7337881238934444, |
|
"learning_rate": 1.822491638258938e-06, |
|
"loss": 0.5223, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.2450704225352114, |
|
"grad_norm": 0.732982524092136, |
|
"learning_rate": 1.8098489233303595e-06, |
|
"loss": 0.5504, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.247887323943662, |
|
"grad_norm": 0.7789300885571788, |
|
"learning_rate": 1.7972405151526339e-06, |
|
"loss": 0.5886, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.250704225352113, |
|
"grad_norm": 0.7432973879306417, |
|
"learning_rate": 1.7846665493160548e-06, |
|
"loss": 0.5765, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.2535211267605635, |
|
"grad_norm": 0.7148013041871587, |
|
"learning_rate": 1.7721271610405232e-06, |
|
"loss": 0.5088, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.256338028169014, |
|
"grad_norm": 0.7190924440924693, |
|
"learning_rate": 1.7596224851740938e-06, |
|
"loss": 0.5668, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.259154929577465, |
|
"grad_norm": 0.8176217354813979, |
|
"learning_rate": 1.747152656191527e-06, |
|
"loss": 0.5516, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.2619718309859156, |
|
"grad_norm": 0.7848830670126005, |
|
"learning_rate": 1.7347178081928407e-06, |
|
"loss": 0.555, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.2647887323943663, |
|
"grad_norm": 0.727192900316563, |
|
"learning_rate": 1.7223180749018675e-06, |
|
"loss": 0.5689, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.267605633802817, |
|
"grad_norm": 0.7379445481611638, |
|
"learning_rate": 1.7099535896648205e-06, |
|
"loss": 0.4954, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.2704225352112677, |
|
"grad_norm": 0.7304515509378274, |
|
"learning_rate": 1.6976244854488545e-06, |
|
"loss": 0.4682, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.2732394366197184, |
|
"grad_norm": 0.6864120639105775, |
|
"learning_rate": 1.6853308948406387e-06, |
|
"loss": 0.5088, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.276056338028169, |
|
"grad_norm": 0.6814634212226736, |
|
"learning_rate": 1.6730729500449322e-06, |
|
"loss": 0.4825, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.27887323943662, |
|
"grad_norm": 0.6828842277477668, |
|
"learning_rate": 1.6608507828831572e-06, |
|
"loss": 0.4605, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.2816901408450705, |
|
"grad_norm": 0.7224430516077436, |
|
"learning_rate": 1.648664524791988e-06, |
|
"loss": 0.5593, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.284507042253521, |
|
"grad_norm": 0.7151328091297615, |
|
"learning_rate": 1.636514306821933e-06, |
|
"loss": 0.5455, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.287323943661972, |
|
"grad_norm": 0.7039701863483498, |
|
"learning_rate": 1.6244002596359255e-06, |
|
"loss": 0.5416, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.2901408450704226, |
|
"grad_norm": 0.7326190869630805, |
|
"learning_rate": 1.6123225135079212e-06, |
|
"loss": 0.4614, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.2929577464788733, |
|
"grad_norm": 0.7405292155656916, |
|
"learning_rate": 1.6002811983214962e-06, |
|
"loss": 0.4931, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.295774647887324, |
|
"grad_norm": 0.7636311159243301, |
|
"learning_rate": 1.5882764435684473e-06, |
|
"loss": 0.5507, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.2985915492957747, |
|
"grad_norm": 0.7583190657458188, |
|
"learning_rate": 1.5763083783474048e-06, |
|
"loss": 0.5374, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.3014084507042254, |
|
"grad_norm": 0.7285565659472655, |
|
"learning_rate": 1.5643771313624394e-06, |
|
"loss": 0.5595, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.304225352112676, |
|
"grad_norm": 0.7347006926328963, |
|
"learning_rate": 1.552482830921681e-06, |
|
"loss": 0.555, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.307042253521127, |
|
"grad_norm": 0.7630789601522776, |
|
"learning_rate": 1.5406256049359359e-06, |
|
"loss": 0.5609, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.3098591549295775, |
|
"grad_norm": 0.7177620817775812, |
|
"learning_rate": 1.5288055809173174e-06, |
|
"loss": 0.5069, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.312676056338028, |
|
"grad_norm": 0.6983432291948535, |
|
"learning_rate": 1.517022885977868e-06, |
|
"loss": 0.532, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.315492957746479, |
|
"grad_norm": 0.7460327681895941, |
|
"learning_rate": 1.5052776468281933e-06, |
|
"loss": 0.569, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.3183098591549296, |
|
"grad_norm": 0.7790295408392904, |
|
"learning_rate": 1.4935699897761031e-06, |
|
"loss": 0.4965, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.3211267605633803, |
|
"grad_norm": 0.7437941519396709, |
|
"learning_rate": 1.4819000407252498e-06, |
|
"loss": 0.5675, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.323943661971831, |
|
"grad_norm": 0.6841364101492224, |
|
"learning_rate": 1.4702679251737768e-06, |
|
"loss": 0.5586, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.3267605633802817, |
|
"grad_norm": 0.7202178343462267, |
|
"learning_rate": 1.4586737682129653e-06, |
|
"loss": 0.4825, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.3295774647887324, |
|
"grad_norm": 0.70160510066986, |
|
"learning_rate": 1.4471176945258947e-06, |
|
"loss": 0.5504, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.332394366197183, |
|
"grad_norm": 0.8365294239621358, |
|
"learning_rate": 1.435599828386095e-06, |
|
"loss": 0.5608, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.335211267605634, |
|
"grad_norm": 0.7361499428255823, |
|
"learning_rate": 1.4241202936562164e-06, |
|
"loss": 0.5188, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.3380281690140845, |
|
"grad_norm": 0.7872741665321432, |
|
"learning_rate": 1.412679213786694e-06, |
|
"loss": 0.563, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.340845070422535, |
|
"grad_norm": 0.7530076640574325, |
|
"learning_rate": 1.40127671181442e-06, |
|
"loss": 0.5875, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.343661971830986, |
|
"grad_norm": 0.7322800554870551, |
|
"learning_rate": 1.3899129103614229e-06, |
|
"loss": 0.4666, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.3464788732394366, |
|
"grad_norm": 0.7673082565463836, |
|
"learning_rate": 1.3785879316335448e-06, |
|
"loss": 0.5516, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.3492957746478873, |
|
"grad_norm": 0.6632810241670146, |
|
"learning_rate": 1.3673018974191354e-06, |
|
"loss": 0.448, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.352112676056338, |
|
"grad_norm": 2.7516018782482017, |
|
"learning_rate": 1.3560549290877318e-06, |
|
"loss": 0.5398, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.3549295774647887, |
|
"grad_norm": 0.7211879094149636, |
|
"learning_rate": 1.3448471475887587e-06, |
|
"loss": 0.5065, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.3577464788732394, |
|
"grad_norm": 0.7995856802043136, |
|
"learning_rate": 1.3336786734502294e-06, |
|
"loss": 0.5797, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.36056338028169, |
|
"grad_norm": 0.758044778698354, |
|
"learning_rate": 1.3225496267774452e-06, |
|
"loss": 0.562, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.363380281690141, |
|
"grad_norm": 0.7575990009876885, |
|
"learning_rate": 1.311460127251708e-06, |
|
"loss": 0.5316, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.3661971830985915, |
|
"grad_norm": 0.6736159728507464, |
|
"learning_rate": 1.3004102941290297e-06, |
|
"loss": 0.4474, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.369014084507042, |
|
"grad_norm": 0.7205227793745679, |
|
"learning_rate": 1.2894002462388533e-06, |
|
"loss": 0.5218, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.371830985915493, |
|
"grad_norm": 0.7217650676264715, |
|
"learning_rate": 1.2784301019827705e-06, |
|
"loss": 0.5204, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.3746478873239436, |
|
"grad_norm": 0.7557329810128117, |
|
"learning_rate": 1.2674999793332539e-06, |
|
"loss": 0.5403, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.3774647887323943, |
|
"grad_norm": 0.7478007085164625, |
|
"learning_rate": 1.2566099958323824e-06, |
|
"loss": 0.5698, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.380281690140845, |
|
"grad_norm": 0.698380036442176, |
|
"learning_rate": 1.2457602685905834e-06, |
|
"loss": 0.5048, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.3830985915492957, |
|
"grad_norm": 0.7695914348531823, |
|
"learning_rate": 1.2349509142853672e-06, |
|
"loss": 0.4931, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.3859154929577464, |
|
"grad_norm": 0.7762276319195065, |
|
"learning_rate": 1.224182049160077e-06, |
|
"loss": 0.6103, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.388732394366197, |
|
"grad_norm": 0.7274222023728415, |
|
"learning_rate": 1.2134537890226366e-06, |
|
"loss": 0.5363, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.391549295774648, |
|
"grad_norm": 0.711203424608623, |
|
"learning_rate": 1.202766249244306e-06, |
|
"loss": 0.5136, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.3943661971830985, |
|
"grad_norm": 0.6870670016862169, |
|
"learning_rate": 1.1921195447584388e-06, |
|
"loss": 0.5454, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.397183098591549, |
|
"grad_norm": 0.7149525528652956, |
|
"learning_rate": 1.1815137900592488e-06, |
|
"loss": 0.5306, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.7638558998644358, |
|
"learning_rate": 1.1709490992005774e-06, |
|
"loss": 0.5352, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.4028169014084506, |
|
"grad_norm": 0.7302152693103684, |
|
"learning_rate": 1.1604255857946667e-06, |
|
"loss": 0.5532, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.4056338028169013, |
|
"grad_norm": 0.7649138698287055, |
|
"learning_rate": 1.149943363010938e-06, |
|
"loss": 0.522, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.408450704225352, |
|
"grad_norm": 0.7093300838883863, |
|
"learning_rate": 1.1395025435747759e-06, |
|
"loss": 0.5058, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.4112676056338027, |
|
"grad_norm": 0.7009610002847351, |
|
"learning_rate": 1.1291032397663142e-06, |
|
"loss": 0.5207, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.4140845070422534, |
|
"grad_norm": 0.7033284525805, |
|
"learning_rate": 1.1187455634192307e-06, |
|
"loss": 0.5504, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.416901408450704, |
|
"grad_norm": 0.7256841908682241, |
|
"learning_rate": 1.108429625919541e-06, |
|
"loss": 0.6407, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.419718309859155, |
|
"grad_norm": 0.6662534100506925, |
|
"learning_rate": 1.098155538204404e-06, |
|
"loss": 0.4797, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.4225352112676055, |
|
"grad_norm": 0.6908974021921962, |
|
"learning_rate": 1.08792341076093e-06, |
|
"loss": 0.5222, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.425352112676056, |
|
"grad_norm": 0.6876509805065439, |
|
"learning_rate": 1.0777333536249873e-06, |
|
"loss": 0.4901, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.428169014084507, |
|
"grad_norm": 0.6927349527894001, |
|
"learning_rate": 1.067585476380023e-06, |
|
"loss": 0.4973, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.4309859154929576, |
|
"grad_norm": 0.6939033155329007, |
|
"learning_rate": 1.0574798881558834e-06, |
|
"loss": 0.5595, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.4338028169014083, |
|
"grad_norm": 0.7110587459331594, |
|
"learning_rate": 1.0474166976276396e-06, |
|
"loss": 0.5999, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.436619718309859, |
|
"grad_norm": 0.7552175181737919, |
|
"learning_rate": 1.0373960130144206e-06, |
|
"loss": 0.5635, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.4394366197183097, |
|
"grad_norm": 0.7322000886422406, |
|
"learning_rate": 1.0274179420782487e-06, |
|
"loss": 0.5159, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.4422535211267604, |
|
"grad_norm": 0.6978339620980003, |
|
"learning_rate": 1.0174825921228803e-06, |
|
"loss": 0.5035, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.445070422535211, |
|
"grad_norm": 0.7233687516678328, |
|
"learning_rate": 1.0075900699926523e-06, |
|
"loss": 0.4615, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.447887323943662, |
|
"grad_norm": 0.7029331443183017, |
|
"learning_rate": 9.977404820713315e-07, |
|
"loss": 0.5125, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.4507042253521125, |
|
"grad_norm": 0.795131750879923, |
|
"learning_rate": 9.879339342809741e-07, |
|
"loss": 0.5948, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.453521126760563, |
|
"grad_norm": 0.6927831704928772, |
|
"learning_rate": 9.781705320807833e-07, |
|
"loss": 0.5056, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.456338028169014, |
|
"grad_norm": 0.7217137637353825, |
|
"learning_rate": 9.684503804659773e-07, |
|
"loss": 0.5544, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.4591549295774646, |
|
"grad_norm": 0.7363521243814246, |
|
"learning_rate": 9.587735839666573e-07, |
|
"loss": 0.5393, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.4619718309859153, |
|
"grad_norm": 0.7313271519524395, |
|
"learning_rate": 9.491402466466893e-07, |
|
"loss": 0.5885, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.464788732394366, |
|
"grad_norm": 0.7118534580991188, |
|
"learning_rate": 9.395504721025773e-07, |
|
"loss": 0.5589, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.4676056338028167, |
|
"grad_norm": 0.7610936167591283, |
|
"learning_rate": 9.300043634623546e-07, |
|
"loss": 0.5192, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.4704225352112674, |
|
"grad_norm": 0.7178990480634524, |
|
"learning_rate": 9.205020233844736e-07, |
|
"loss": 0.4867, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.473239436619718, |
|
"grad_norm": 0.7145917047867616, |
|
"learning_rate": 9.110435540567003e-07, |
|
"loss": 0.5257, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.476056338028169, |
|
"grad_norm": 0.6846734741974113, |
|
"learning_rate": 9.016290571950171e-07, |
|
"loss": 0.5265, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.4788732394366195, |
|
"grad_norm": 0.7431090137610343, |
|
"learning_rate": 8.922586340425288e-07, |
|
"loss": 0.5516, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.4816901408450702, |
|
"grad_norm": 0.770495805259206, |
|
"learning_rate": 8.829323853683719e-07, |
|
"loss": 0.6051, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.4845070422535214, |
|
"grad_norm": 0.6952836508138835, |
|
"learning_rate": 8.736504114666345e-07, |
|
"loss": 0.511, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.4873239436619716, |
|
"grad_norm": 0.7402309170159516, |
|
"learning_rate": 8.644128121552742e-07, |
|
"loss": 0.5602, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.4901408450704228, |
|
"grad_norm": 0.7102436346119819, |
|
"learning_rate": 8.552196867750462e-07, |
|
"loss": 0.5326, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.492957746478873, |
|
"grad_norm": 0.7904150584495637, |
|
"learning_rate": 8.460711341884353e-07, |
|
"loss": 0.5704, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.495774647887324, |
|
"grad_norm": 0.7701966990313119, |
|
"learning_rate": 8.36967252778591e-07, |
|
"loss": 0.5924, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.4985915492957744, |
|
"grad_norm": 0.7369703311898811, |
|
"learning_rate": 8.279081404482748e-07, |
|
"loss": 0.5543, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.5014084507042256, |
|
"grad_norm": 0.7235281553818372, |
|
"learning_rate": 8.188938946187991e-07, |
|
"loss": 0.5459, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.504225352112676, |
|
"grad_norm": 0.7571994304517072, |
|
"learning_rate": 8.099246122289861e-07, |
|
"loss": 0.5572, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.507042253521127, |
|
"grad_norm": 0.6593905074365984, |
|
"learning_rate": 8.010003897341212e-07, |
|
"loss": 0.4757, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.5098591549295772, |
|
"grad_norm": 0.7809776499441126, |
|
"learning_rate": 7.921213231049196e-07, |
|
"loss": 0.583, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.5126760563380284, |
|
"grad_norm": 0.7763572290071068, |
|
"learning_rate": 7.832875078264912e-07, |
|
"loss": 0.5567, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.5154929577464786, |
|
"grad_norm": 0.8164057637936308, |
|
"learning_rate": 7.74499038897315e-07, |
|
"loss": 0.6174, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.5183098591549298, |
|
"grad_norm": 0.7487499523494067, |
|
"learning_rate": 7.65756010828217e-07, |
|
"loss": 0.5256, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.52112676056338, |
|
"grad_norm": 0.692947524253345, |
|
"learning_rate": 7.570585176413547e-07, |
|
"loss": 0.527, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.523943661971831, |
|
"grad_norm": 0.7745072471556728, |
|
"learning_rate": 7.484066528692041e-07, |
|
"loss": 0.5582, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.5267605633802814, |
|
"grad_norm": 0.759370710618239, |
|
"learning_rate": 7.398005095535565e-07, |
|
"loss": 0.5745, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.5295774647887326, |
|
"grad_norm": 0.7408984808818695, |
|
"learning_rate": 7.312401802445169e-07, |
|
"loss": 0.5308, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.532394366197183, |
|
"grad_norm": 0.7722385173607145, |
|
"learning_rate": 7.227257569995061e-07, |
|
"loss": 0.5938, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.535211267605634, |
|
"grad_norm": 0.7358053652279244, |
|
"learning_rate": 7.142573313822754e-07, |
|
"loss": 0.5992, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.5380281690140842, |
|
"grad_norm": 0.7192406498904013, |
|
"learning_rate": 7.058349944619186e-07, |
|
"loss": 0.5353, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.5408450704225354, |
|
"grad_norm": 0.7158007177960015, |
|
"learning_rate": 6.974588368118934e-07, |
|
"loss": 0.5445, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.543661971830986, |
|
"grad_norm": 0.7826333376030878, |
|
"learning_rate": 6.89128948509048e-07, |
|
"loss": 0.6257, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.546478873239437, |
|
"grad_norm": 0.7252757135255095, |
|
"learning_rate": 6.808454191326519e-07, |
|
"loss": 0.523, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.5492957746478875, |
|
"grad_norm": 0.7296052273393889, |
|
"learning_rate": 6.726083377634318e-07, |
|
"loss": 0.4892, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.552112676056338, |
|
"grad_norm": 0.6998049271823165, |
|
"learning_rate": 6.644177929826162e-07, |
|
"loss": 0.5785, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.554929577464789, |
|
"grad_norm": 0.6866771635498244, |
|
"learning_rate": 6.562738728709795e-07, |
|
"loss": 0.5141, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.5577464788732396, |
|
"grad_norm": 0.762851841283065, |
|
"learning_rate": 6.481766650078969e-07, |
|
"loss": 0.5784, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.5605633802816903, |
|
"grad_norm": 0.745798205319872, |
|
"learning_rate": 6.401262564704019e-07, |
|
"loss": 0.5615, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.563380281690141, |
|
"grad_norm": 0.7267076581981277, |
|
"learning_rate": 6.321227338322511e-07, |
|
"loss": 0.588, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.5661971830985917, |
|
"grad_norm": 0.683833936306517, |
|
"learning_rate": 6.241661831629902e-07, |
|
"loss": 0.5261, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.5690140845070424, |
|
"grad_norm": 0.785279539380278, |
|
"learning_rate": 6.162566900270311e-07, |
|
"loss": 0.5273, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.571830985915493, |
|
"grad_norm": 0.6821061292671917, |
|
"learning_rate": 6.083943394827329e-07, |
|
"loss": 0.4928, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.574647887323944, |
|
"grad_norm": 0.7156545834103997, |
|
"learning_rate": 6.005792160814821e-07, |
|
"loss": 0.5511, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.5774647887323945, |
|
"grad_norm": 0.6867901055418119, |
|
"learning_rate": 5.928114038667888e-07, |
|
"loss": 0.5047, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.580281690140845, |
|
"grad_norm": 0.6960326420245707, |
|
"learning_rate": 5.850909863733784e-07, |
|
"loss": 0.5591, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.583098591549296, |
|
"grad_norm": 0.7774414120953896, |
|
"learning_rate": 5.774180466262985e-07, |
|
"loss": 0.6052, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.5859154929577466, |
|
"grad_norm": 0.73166882890799, |
|
"learning_rate": 5.697926671400194e-07, |
|
"loss": 0.5807, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.5887323943661973, |
|
"grad_norm": 0.754328972659702, |
|
"learning_rate": 5.622149299175539e-07, |
|
"loss": 0.5593, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.591549295774648, |
|
"grad_norm": 0.7172975394869928, |
|
"learning_rate": 5.546849164495688e-07, |
|
"loss": 0.5434, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.5943661971830987, |
|
"grad_norm": 0.7653703517730494, |
|
"learning_rate": 5.472027077135145e-07, |
|
"loss": 0.5656, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.5971830985915494, |
|
"grad_norm": 0.6981722575591085, |
|
"learning_rate": 5.397683841727485e-07, |
|
"loss": 0.4886, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.7444541339360583, |
|
"learning_rate": 5.323820257756745e-07, |
|
"loss": 0.5395, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.602816901408451, |
|
"grad_norm": 0.6835507442292554, |
|
"learning_rate": 5.250437119548817e-07, |
|
"loss": 0.518, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.6056338028169015, |
|
"grad_norm": 0.6763495669522698, |
|
"learning_rate": 5.177535216262885e-07, |
|
"loss": 0.4711, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.608450704225352, |
|
"grad_norm": 0.6976570030316313, |
|
"learning_rate": 5.105115331882954e-07, |
|
"loss": 0.5282, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.611267605633803, |
|
"grad_norm": 0.7230013623985598, |
|
"learning_rate": 5.033178245209436e-07, |
|
"loss": 0.5319, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.6140845070422536, |
|
"grad_norm": 0.701113900863958, |
|
"learning_rate": 4.961724729850731e-07, |
|
"loss": 0.4762, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.6169014084507043, |
|
"grad_norm": 0.7593924318858984, |
|
"learning_rate": 4.890755554214954e-07, |
|
"loss": 0.5236, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.619718309859155, |
|
"grad_norm": 0.7245468462578131, |
|
"learning_rate": 4.820271481501642e-07, |
|
"loss": 0.5012, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.6225352112676057, |
|
"grad_norm": 0.742178416513282, |
|
"learning_rate": 4.7502732696935507e-07, |
|
"loss": 0.5618, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.6253521126760564, |
|
"grad_norm": 0.7757926136824119, |
|
"learning_rate": 4.680761671548517e-07, |
|
"loss": 0.5689, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.628169014084507, |
|
"grad_norm": 0.757413214376559, |
|
"learning_rate": 4.6117374345913454e-07, |
|
"loss": 0.6093, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.630985915492958, |
|
"grad_norm": 0.7808878960276914, |
|
"learning_rate": 4.5432013011057984e-07, |
|
"loss": 0.6574, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.6338028169014085, |
|
"grad_norm": 0.7432341814513662, |
|
"learning_rate": 4.4751540081265645e-07, |
|
"loss": 0.5579, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.636619718309859, |
|
"grad_norm": 0.7432292701218901, |
|
"learning_rate": 4.407596287431387e-07, |
|
"loss": 0.464, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.63943661971831, |
|
"grad_norm": 0.7142037612001625, |
|
"learning_rate": 4.340528865533161e-07, |
|
"loss": 0.5436, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.6422535211267606, |
|
"grad_norm": 0.7587147906576946, |
|
"learning_rate": 4.2739524636721207e-07, |
|
"loss": 0.5685, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.6450704225352113, |
|
"grad_norm": 0.7250510864530743, |
|
"learning_rate": 4.207867797808102e-07, |
|
"loss": 0.5706, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.647887323943662, |
|
"grad_norm": 0.7444089098403069, |
|
"learning_rate": 4.1422755786128364e-07, |
|
"loss": 0.5828, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.6507042253521127, |
|
"grad_norm": 0.7111869917066532, |
|
"learning_rate": 4.0771765114622886e-07, |
|
"loss": 0.5301, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.6535211267605634, |
|
"grad_norm": 0.6726007711358601, |
|
"learning_rate": 4.012571296429102e-07, |
|
"loss": 0.4944, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.656338028169014, |
|
"grad_norm": 0.7311381996970749, |
|
"learning_rate": 3.948460628275047e-07, |
|
"loss": 0.5329, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.659154929577465, |
|
"grad_norm": 0.7535498452656507, |
|
"learning_rate": 3.8848451964435594e-07, |
|
"loss": 0.5611, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.6619718309859155, |
|
"grad_norm": 0.7368613397695026, |
|
"learning_rate": 3.8217256850523243e-07, |
|
"loss": 0.575, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.664788732394366, |
|
"grad_norm": 0.8283481948228351, |
|
"learning_rate": 3.759102772885925e-07, |
|
"loss": 0.6088, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.667605633802817, |
|
"grad_norm": 0.6831237388540137, |
|
"learning_rate": 3.696977133388524e-07, |
|
"loss": 0.5037, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.6704225352112676, |
|
"grad_norm": 0.654320245207509, |
|
"learning_rate": 3.635349434656638e-07, |
|
"loss": 0.4809, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.6732394366197183, |
|
"grad_norm": 0.6823803621068965, |
|
"learning_rate": 3.5742203394319606e-07, |
|
"loss": 0.4552, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.676056338028169, |
|
"grad_norm": 0.7047964358595283, |
|
"learning_rate": 3.513590505094222e-07, |
|
"loss": 0.5088, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.6788732394366197, |
|
"grad_norm": 0.7489158544565429, |
|
"learning_rate": 3.453460583654106e-07, |
|
"loss": 0.52, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.6816901408450704, |
|
"grad_norm": 0.7482041266928414, |
|
"learning_rate": 3.3938312217462686e-07, |
|
"loss": 0.4875, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.684507042253521, |
|
"grad_norm": 0.7319451096212228, |
|
"learning_rate": 3.334703060622374e-07, |
|
"loss": 0.546, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.687323943661972, |
|
"grad_norm": 0.793605571383183, |
|
"learning_rate": 3.2760767361441847e-07, |
|
"loss": 0.5887, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.6901408450704225, |
|
"grad_norm": 0.695997307638334, |
|
"learning_rate": 3.2179528787767313e-07, |
|
"loss": 0.5182, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.692957746478873, |
|
"grad_norm": 0.7360062330805416, |
|
"learning_rate": 3.1603321135815434e-07, |
|
"loss": 0.6046, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.695774647887324, |
|
"grad_norm": 0.7164054436806284, |
|
"learning_rate": 3.103215060209902e-07, |
|
"loss": 0.5753, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.6985915492957746, |
|
"grad_norm": 0.7148121631621239, |
|
"learning_rate": 3.046602332896209e-07, |
|
"loss": 0.5756, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.7014084507042253, |
|
"grad_norm": 0.6847968930046422, |
|
"learning_rate": 2.9904945404513606e-07, |
|
"loss": 0.5609, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.704225352112676, |
|
"grad_norm": 0.7182111847241899, |
|
"learning_rate": 2.9348922862562025e-07, |
|
"loss": 0.5507, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.7070422535211267, |
|
"grad_norm": 0.782842512987059, |
|
"learning_rate": 2.879796168255039e-07, |
|
"loss": 0.5537, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.7098591549295774, |
|
"grad_norm": 0.7642184480760954, |
|
"learning_rate": 2.825206778949219e-07, |
|
"loss": 0.5525, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.712676056338028, |
|
"grad_norm": 0.7731085239034594, |
|
"learning_rate": 2.771124705390743e-07, |
|
"loss": 0.6081, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.715492957746479, |
|
"grad_norm": 0.7025162728788912, |
|
"learning_rate": 2.717550529175955e-07, |
|
"loss": 0.4821, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.7183098591549295, |
|
"grad_norm": 0.7198454479509169, |
|
"learning_rate": 2.664484826439312e-07, |
|
"loss": 0.5213, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.72112676056338, |
|
"grad_norm": 0.7576950704979507, |
|
"learning_rate": 2.611928167847133e-07, |
|
"loss": 0.5119, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.723943661971831, |
|
"grad_norm": 0.7263675362698118, |
|
"learning_rate": 2.5598811185915397e-07, |
|
"loss": 0.5309, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.7267605633802816, |
|
"grad_norm": 0.8071498912093302, |
|
"learning_rate": 2.5083442383843136e-07, |
|
"loss": 0.6026, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.7295774647887323, |
|
"grad_norm": 0.716757593430612, |
|
"learning_rate": 2.457318081450899e-07, |
|
"loss": 0.4898, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.732394366197183, |
|
"grad_norm": 0.7652310058804378, |
|
"learning_rate": 2.4068031965244486e-07, |
|
"loss": 0.5468, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.7352112676056337, |
|
"grad_norm": 0.7250545500999873, |
|
"learning_rate": 2.3568001268399088e-07, |
|
"loss": 0.5776, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.7380281690140844, |
|
"grad_norm": 0.728736775512018, |
|
"learning_rate": 2.3073094101282056e-07, |
|
"loss": 0.5576, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.740845070422535, |
|
"grad_norm": 0.6672516406987512, |
|
"learning_rate": 2.258331578610423e-07, |
|
"loss": 0.4974, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.743661971830986, |
|
"grad_norm": 0.706845139654572, |
|
"learning_rate": 2.2098671589921017e-07, |
|
"loss": 0.5409, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.7464788732394365, |
|
"grad_norm": 0.6557210086436945, |
|
"learning_rate": 2.1619166724575924e-07, |
|
"loss": 0.5145, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.749295774647887, |
|
"grad_norm": 0.6968785117284902, |
|
"learning_rate": 2.114480634664401e-07, |
|
"loss": 0.525, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.752112676056338, |
|
"grad_norm": 0.7560876394254348, |
|
"learning_rate": 2.0675595557376916e-07, |
|
"loss": 0.5601, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.7549295774647886, |
|
"grad_norm": 0.7526488510684558, |
|
"learning_rate": 2.0211539402647807e-07, |
|
"loss": 0.5691, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.7577464788732393, |
|
"grad_norm": 0.6816358178509648, |
|
"learning_rate": 1.9752642872897078e-07, |
|
"loss": 0.4796, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.76056338028169, |
|
"grad_norm": 0.7374455810319034, |
|
"learning_rate": 1.9298910903078838e-07, |
|
"loss": 0.5415, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.7633802816901407, |
|
"grad_norm": 0.6844607412321583, |
|
"learning_rate": 1.8850348372607575e-07, |
|
"loss": 0.5275, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.7661971830985914, |
|
"grad_norm": 0.6959147100628197, |
|
"learning_rate": 1.840696010530596e-07, |
|
"loss": 0.5354, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.769014084507042, |
|
"grad_norm": 0.7374698652571952, |
|
"learning_rate": 1.7968750869352792e-07, |
|
"loss": 0.5119, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.771830985915493, |
|
"grad_norm": 0.717663869275445, |
|
"learning_rate": 1.753572537723186e-07, |
|
"loss": 0.5049, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.7746478873239435, |
|
"grad_norm": 0.6950965076865067, |
|
"learning_rate": 1.7107888285681106e-07, |
|
"loss": 0.55, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.777464788732394, |
|
"grad_norm": 0.7644851193031408, |
|
"learning_rate": 1.6685244195642715e-07, |
|
"loss": 0.5415, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.780281690140845, |
|
"grad_norm": 0.7470884993973315, |
|
"learning_rate": 1.6267797652213368e-07, |
|
"loss": 0.5456, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.7830985915492956, |
|
"grad_norm": 0.7605411571407109, |
|
"learning_rate": 1.5855553144595858e-07, |
|
"loss": 0.5865, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.7859154929577463, |
|
"grad_norm": 0.7366936546933648, |
|
"learning_rate": 1.5448515106050165e-07, |
|
"loss": 0.4865, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.788732394366197, |
|
"grad_norm": 0.7539626563855862, |
|
"learning_rate": 1.5046687913846392e-07, |
|
"loss": 0.5467, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.7915492957746477, |
|
"grad_norm": 0.7014547717515902, |
|
"learning_rate": 1.4650075889217297e-07, |
|
"loss": 0.5575, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.7943661971830984, |
|
"grad_norm": 0.683981370181626, |
|
"learning_rate": 1.4258683297311891e-07, |
|
"loss": 0.5005, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.7971830985915496, |
|
"grad_norm": 0.7191901291359036, |
|
"learning_rate": 1.3872514347149756e-07, |
|
"loss": 0.5829, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.6988468027896046, |
|
"learning_rate": 1.3491573191575513e-07, |
|
"loss": 0.4915, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.802816901408451, |
|
"grad_norm": 0.7458383149541131, |
|
"learning_rate": 1.3115863927214423e-07, |
|
"loss": 0.5119, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.8056338028169012, |
|
"grad_norm": 0.7792116484568382, |
|
"learning_rate": 1.2745390594428143e-07, |
|
"loss": 0.5684, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.8084507042253524, |
|
"grad_norm": 0.7680837077144288, |
|
"learning_rate": 1.2380157177271369e-07, |
|
"loss": 0.5421, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.8112676056338026, |
|
"grad_norm": 0.7354184387370749, |
|
"learning_rate": 1.202016760344893e-07, |
|
"loss": 0.5652, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.8140845070422538, |
|
"grad_norm": 0.7345418602331815, |
|
"learning_rate": 1.166542574427354e-07, |
|
"loss": 0.5414, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.816901408450704, |
|
"grad_norm": 0.7264331561764557, |
|
"learning_rate": 1.131593541462439e-07, |
|
"loss": 0.5053, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.819718309859155, |
|
"grad_norm": 0.7215698428704066, |
|
"learning_rate": 1.0971700372905736e-07, |
|
"loss": 0.5694, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.8225352112676054, |
|
"grad_norm": 0.6924431328276708, |
|
"learning_rate": 1.0632724321006816e-07, |
|
"loss": 0.4931, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.8253521126760566, |
|
"grad_norm": 0.7433999913262525, |
|
"learning_rate": 1.0299010904261886e-07, |
|
"loss": 0.5269, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.828169014084507, |
|
"grad_norm": 0.7591641686077665, |
|
"learning_rate": 9.970563711410974e-08, |
|
"loss": 0.5789, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.830985915492958, |
|
"grad_norm": 0.7206709788993783, |
|
"learning_rate": 9.647386274561466e-08, |
|
"loss": 0.5783, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.8338028169014082, |
|
"grad_norm": 0.763303001346764, |
|
"learning_rate": 9.329482069149853e-08, |
|
"loss": 0.5639, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.8366197183098594, |
|
"grad_norm": 0.7171907005458857, |
|
"learning_rate": 9.016854513904828e-08, |
|
"loss": 0.5147, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.8394366197183096, |
|
"grad_norm": 0.7333285024549918, |
|
"learning_rate": 8.709506970809855e-08, |
|
"loss": 0.5431, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.8422535211267608, |
|
"grad_norm": 0.7283810889890997, |
|
"learning_rate": 8.407442745067552e-08, |
|
"loss": 0.5362, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.845070422535211, |
|
"grad_norm": 0.7149664274563876, |
|
"learning_rate": 8.110665085063918e-08, |
|
"loss": 0.5162, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.847887323943662, |
|
"grad_norm": 0.7302249666055635, |
|
"learning_rate": 7.819177182333493e-08, |
|
"loss": 0.5649, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.8507042253521124, |
|
"grad_norm": 0.6819672875462226, |
|
"learning_rate": 7.532982171524927e-08, |
|
"loss": 0.5188, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.8535211267605636, |
|
"grad_norm": 0.7429108794162049, |
|
"learning_rate": 7.252083130367349e-08, |
|
"loss": 0.5547, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.856338028169014, |
|
"grad_norm": 0.6925223924918105, |
|
"learning_rate": 6.97648307963733e-08, |
|
"loss": 0.5092, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.859154929577465, |
|
"grad_norm": 0.731757804946538, |
|
"learning_rate": 6.706184983126196e-08, |
|
"loss": 0.5549, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.8619718309859152, |
|
"grad_norm": 0.7104692499216267, |
|
"learning_rate": 6.441191747608322e-08, |
|
"loss": 0.5388, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.8647887323943664, |
|
"grad_norm": 0.6701176981914706, |
|
"learning_rate": 6.181506222809885e-08, |
|
"loss": 0.5234, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.8676056338028166, |
|
"grad_norm": 0.7709788734633877, |
|
"learning_rate": 5.9271312013781665e-08, |
|
"loss": 0.593, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.870422535211268, |
|
"grad_norm": 0.7829907431052303, |
|
"learning_rate": 5.678069418851351e-08, |
|
"loss": 0.6111, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.873239436619718, |
|
"grad_norm": 0.7037842335096338, |
|
"learning_rate": 5.4343235536294944e-08, |
|
"loss": 0.4803, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.876056338028169, |
|
"grad_norm": 0.667841942594328, |
|
"learning_rate": 5.195896226945385e-08, |
|
"loss": 0.4703, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.87887323943662, |
|
"grad_norm": 0.7093517015710021, |
|
"learning_rate": 4.9627900028365036e-08, |
|
"loss": 0.5247, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.8816901408450706, |
|
"grad_norm": 0.7804094006727855, |
|
"learning_rate": 4.735007388117441e-08, |
|
"loss": 0.5622, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.8845070422535213, |
|
"grad_norm": 0.7281503997189487, |
|
"learning_rate": 4.5125508323528025e-08, |
|
"loss": 0.5361, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.887323943661972, |
|
"grad_norm": 0.7459062923541441, |
|
"learning_rate": 4.2954227278310666e-08, |
|
"loss": 0.5618, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.8901408450704227, |
|
"grad_norm": 0.7441252088100604, |
|
"learning_rate": 4.083625409538772e-08, |
|
"loss": 0.6007, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.8929577464788734, |
|
"grad_norm": 0.717554049706811, |
|
"learning_rate": 3.877161155135423e-08, |
|
"loss": 0.5736, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.895774647887324, |
|
"grad_norm": 0.696592390563458, |
|
"learning_rate": 3.676032184928957e-08, |
|
"loss": 0.5046, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.898591549295775, |
|
"grad_norm": 0.6542018044465501, |
|
"learning_rate": 3.4802406618518195e-08, |
|
"loss": 0.4593, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.9014084507042255, |
|
"grad_norm": 0.6815251687238045, |
|
"learning_rate": 3.289788691437923e-08, |
|
"loss": 0.4588, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.904225352112676, |
|
"grad_norm": 0.7027235666996269, |
|
"learning_rate": 3.10467832179967e-08, |
|
"loss": 0.5113, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.907042253521127, |
|
"grad_norm": 0.7607538460194083, |
|
"learning_rate": 2.9249115436063013e-08, |
|
"loss": 0.5941, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.9098591549295776, |
|
"grad_norm": 0.7220736725918624, |
|
"learning_rate": 2.7504902900621356e-08, |
|
"loss": 0.4933, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.9126760563380283, |
|
"grad_norm": 0.7166615863712383, |
|
"learning_rate": 2.581416436886086e-08, |
|
"loss": 0.5728, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.915492957746479, |
|
"grad_norm": 0.7971855756389676, |
|
"learning_rate": 2.4176918022912333e-08, |
|
"loss": 0.6093, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.9183098591549297, |
|
"grad_norm": 0.708380543577938, |
|
"learning_rate": 2.2593181469653945e-08, |
|
"loss": 0.5801, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.9211267605633804, |
|
"grad_norm": 0.7432289103223266, |
|
"learning_rate": 2.1062971740523076e-08, |
|
"loss": 0.5307, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.923943661971831, |
|
"grad_norm": 0.7786874168213611, |
|
"learning_rate": 1.958630529133032e-08, |
|
"loss": 0.5472, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.926760563380282, |
|
"grad_norm": 0.7737112697875131, |
|
"learning_rate": 1.816319800208466e-08, |
|
"loss": 0.6477, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.9295774647887325, |
|
"grad_norm": 0.755421734849608, |
|
"learning_rate": 1.679366517682246e-08, |
|
"loss": 0.4996, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.932394366197183, |
|
"grad_norm": 0.7344390660036586, |
|
"learning_rate": 1.547772154344207e-08, |
|
"loss": 0.5684, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.935211267605634, |
|
"grad_norm": 0.7455018669291504, |
|
"learning_rate": 1.4215381253546712e-08, |
|
"loss": 0.5352, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.9380281690140846, |
|
"grad_norm": 0.6500928946288003, |
|
"learning_rate": 1.3006657882290163e-08, |
|
"loss": 0.4517, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.9408450704225353, |
|
"grad_norm": 0.7398268482770803, |
|
"learning_rate": 1.1851564428232987e-08, |
|
"loss": 0.4815, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.943661971830986, |
|
"grad_norm": 0.6758826237123673, |
|
"learning_rate": 1.0750113313202082e-08, |
|
"loss": 0.4791, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.9464788732394367, |
|
"grad_norm": 0.7585979778426581, |
|
"learning_rate": 9.70231638215635e-09, |
|
"loss": 0.6102, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.9492957746478874, |
|
"grad_norm": 0.7103789084083153, |
|
"learning_rate": 8.708184903060135e-09, |
|
"loss": 0.4698, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.952112676056338, |
|
"grad_norm": 0.7183494091050963, |
|
"learning_rate": 7.76772956676164e-09, |
|
"loss": 0.5356, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.954929577464789, |
|
"grad_norm": 0.7193080568255721, |
|
"learning_rate": 6.880960486877475e-09, |
|
"loss": 0.5064, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.9577464788732395, |
|
"grad_norm": 0.7797853793145919, |
|
"learning_rate": 6.047887199686075e-09, |
|
"loss": 0.5628, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.96056338028169, |
|
"grad_norm": 0.7388278933529127, |
|
"learning_rate": 5.268518664022226e-09, |
|
"loss": 0.5486, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.963380281690141, |
|
"grad_norm": 0.7159582661188039, |
|
"learning_rate": 4.54286326118214e-09, |
|
"loss": 0.5335, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.9661971830985916, |
|
"grad_norm": 0.7133437665306763, |
|
"learning_rate": 3.870928794834083e-09, |
|
"loss": 0.4978, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.9690140845070423, |
|
"grad_norm": 0.732528489265048, |
|
"learning_rate": 3.2527224909328914e-09, |
|
"loss": 0.561, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.971830985915493, |
|
"grad_norm": 0.7353062443834698, |
|
"learning_rate": 2.6882509976433604e-09, |
|
"loss": 0.5916, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.9746478873239437, |
|
"grad_norm": 0.7331761189776203, |
|
"learning_rate": 2.1775203852680836e-09, |
|
"loss": 0.4967, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.9774647887323944, |
|
"grad_norm": 0.7162322949974765, |
|
"learning_rate": 1.7205361461825054e-09, |
|
"loss": 0.5463, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.980281690140845, |
|
"grad_norm": 0.7187767220372333, |
|
"learning_rate": 1.317303194776076e-09, |
|
"loss": 0.4954, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.983098591549296, |
|
"grad_norm": 0.7051041793653454, |
|
"learning_rate": 9.678258673995189e-10, |
|
"loss": 0.5135, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.9859154929577465, |
|
"grad_norm": 0.7268714889415322, |
|
"learning_rate": 6.721079223165339e-10, |
|
"loss": 0.5272, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.988732394366197, |
|
"grad_norm": 1.1890425193935092, |
|
"learning_rate": 4.301525396666062e-10, |
|
"loss": 0.6821, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.991549295774648, |
|
"grad_norm": 0.7030030929326179, |
|
"learning_rate": 2.4196232142614794e-10, |
|
"loss": 0.5686, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.9943661971830986, |
|
"grad_norm": 0.6908075641092671, |
|
"learning_rate": 1.0753929138629382e-10, |
|
"loss": 0.5257, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.9971830985915493, |
|
"grad_norm": 0.7428862677010641, |
|
"learning_rate": 2.6884895126255695e-11, |
|
"loss": 0.4927, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.7428387204524438, |
|
"learning_rate": 0.0, |
|
"loss": 0.5818, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1065, |
|
"total_flos": 171917700349952.0, |
|
"train_loss": 0.6223782367549592, |
|
"train_runtime": 3879.5995, |
|
"train_samples_per_second": 26.353, |
|
"train_steps_per_second": 0.275 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1065, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 171917700349952.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|