{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4342162396873643,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0004342162396873643, |
|
"grad_norm": 0.9606487154960632, |
|
"learning_rate": 5e-05, |
|
"loss": 9.188, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0008684324793747286, |
|
"grad_norm": 0.9193044304847717, |
|
"learning_rate": 0.0001, |
|
"loss": 9.1897, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0013026487190620929, |
|
"grad_norm": 0.9406704306602478, |
|
"learning_rate": 0.00015, |
|
"loss": 9.0726, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0017368649587494573, |
|
"grad_norm": 0.9401040077209473, |
|
"learning_rate": 0.0002, |
|
"loss": 8.7129, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0021710811984368217, |
|
"grad_norm": 0.8919561505317688, |
|
"learning_rate": 0.00025, |
|
"loss": 8.1364, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0026052974381241857, |
|
"grad_norm": 0.8310884237289429, |
|
"learning_rate": 0.0003, |
|
"loss": 7.4088, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.00303951367781155, |
|
"grad_norm": 0.748382031917572, |
|
"learning_rate": 0.00035, |
|
"loss": 6.6918, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0034737299174989146, |
|
"grad_norm": 0.6353039145469666, |
|
"learning_rate": 0.0004, |
|
"loss": 5.8753, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0039079461571862786, |
|
"grad_norm": 0.5517627000808716, |
|
"learning_rate": 0.00045000000000000004, |
|
"loss": 5.4347, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.004342162396873643, |
|
"grad_norm": 0.5389806628227234, |
|
"learning_rate": 0.0005, |
|
"loss": 5.0181, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.004776378636561007, |
|
"grad_norm": 0.6018754243850708, |
|
"learning_rate": 0.00055, |
|
"loss": 4.9487, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.005210594876248371, |
|
"grad_norm": 0.6734316349029541, |
|
"learning_rate": 0.0006, |
|
"loss": 4.9233, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.005644811115935736, |
|
"grad_norm": 0.6842392086982727, |
|
"learning_rate": 0.0006500000000000001, |
|
"loss": 4.8078, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0060790273556231, |
|
"grad_norm": 0.6519020199775696, |
|
"learning_rate": 0.0007, |
|
"loss": 4.6065, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.006513243595310464, |
|
"grad_norm": 0.5788787603378296, |
|
"learning_rate": 0.00075, |
|
"loss": 4.3967, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.006947459834997829, |
|
"grad_norm": 0.5370550751686096, |
|
"learning_rate": 0.0008, |
|
"loss": 4.1971, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.007381676074685193, |
|
"grad_norm": 0.4862042963504791, |
|
"learning_rate": 0.00085, |
|
"loss": 3.8583, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.007815892314372557, |
|
"grad_norm": 0.4636250436306, |
|
"learning_rate": 0.0009000000000000001, |
|
"loss": 3.6372, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.008250108554059922, |
|
"grad_norm": 0.4613461494445801, |
|
"learning_rate": 0.00095, |
|
"loss": 3.5642, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.008684324793747287, |
|
"grad_norm": 0.4593052566051483, |
|
"learning_rate": 0.001, |
|
"loss": 3.4463, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.00911854103343465, |
|
"grad_norm": 0.4367975890636444, |
|
"learning_rate": 0.0009999974308631955, |
|
"loss": 3.2621, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.009552757273122015, |
|
"grad_norm": 0.41558611392974854, |
|
"learning_rate": 0.000999989723479183, |
|
"loss": 3.0598, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.00998697351280938, |
|
"grad_norm": 0.3967452049255371, |
|
"learning_rate": 0.0009999768779271685, |
|
"loss": 2.936, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.010421189752496743, |
|
"grad_norm": 0.3664601147174835, |
|
"learning_rate": 0.0009999588943391596, |
|
"loss": 2.6913, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.010855405992184108, |
|
"grad_norm": 0.359183669090271, |
|
"learning_rate": 0.0009999357728999656, |
|
"loss": 2.5639, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.011289622231871473, |
|
"grad_norm": 0.3342216908931732, |
|
"learning_rate": 0.000999907513847195, |
|
"loss": 2.401, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.011723838471558836, |
|
"grad_norm": 0.36653760075569153, |
|
"learning_rate": 0.0009998741174712534, |
|
"loss": 2.6216, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0121580547112462, |
|
"grad_norm": 0.30254194140434265, |
|
"learning_rate": 0.00099983558411534, |
|
"loss": 2.2086, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.012592270950933565, |
|
"grad_norm": 0.2917495369911194, |
|
"learning_rate": 0.0009997919141754449, |
|
"loss": 2.1274, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.013026487190620929, |
|
"grad_norm": 0.2690434455871582, |
|
"learning_rate": 0.000999743108100344, |
|
"loss": 2.0581, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.013460703430308293, |
|
"grad_norm": 0.26215028762817383, |
|
"learning_rate": 0.0009996891663915954, |
|
"loss": 1.9764, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.013894919669995658, |
|
"grad_norm": 0.2453673779964447, |
|
"learning_rate": 0.0009996300896035338, |
|
"loss": 1.9219, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.014329135909683021, |
|
"grad_norm": 0.24287743866443634, |
|
"learning_rate": 0.0009995658783432644, |
|
"loss": 1.8433, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.014763352149370386, |
|
"grad_norm": 0.2258252203464508, |
|
"learning_rate": 0.0009994965332706573, |
|
"loss": 1.7392, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.015197568389057751, |
|
"grad_norm": 0.21900461614131927, |
|
"learning_rate": 0.0009994220550983404, |
|
"loss": 1.7185, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.015631784628745114, |
|
"grad_norm": 0.21046680212020874, |
|
"learning_rate": 0.0009993424445916923, |
|
"loss": 1.6541, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.01606600086843248, |
|
"grad_norm": 0.19736959040164948, |
|
"learning_rate": 0.0009992577025688338, |
|
"loss": 1.6321, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.016500217108119844, |
|
"grad_norm": 0.19068720936775208, |
|
"learning_rate": 0.0009991678299006205, |
|
"loss": 1.6236, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.01693443334780721, |
|
"grad_norm": 0.18818727135658264, |
|
"learning_rate": 0.000999072827510633, |
|
"loss": 1.5791, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.017368649587494574, |
|
"grad_norm": 0.17485196888446808, |
|
"learning_rate": 0.0009989726963751681, |
|
"loss": 1.6086, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.017802865827181935, |
|
"grad_norm": 0.1762186735868454, |
|
"learning_rate": 0.000998867437523228, |
|
"loss": 1.5328, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0182370820668693, |
|
"grad_norm": 0.19658854603767395, |
|
"learning_rate": 0.0009987570520365104, |
|
"loss": 1.4491, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.018671298306556665, |
|
"grad_norm": 0.1639322191476822, |
|
"learning_rate": 0.0009986415410493965, |
|
"loss": 1.4447, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.01910551454624403, |
|
"grad_norm": 0.1552889496088028, |
|
"learning_rate": 0.000998520905748941, |
|
"loss": 1.4345, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.019539730785931395, |
|
"grad_norm": 0.14960208535194397, |
|
"learning_rate": 0.0009983951473748578, |
|
"loss": 1.364, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.01997394702561876, |
|
"grad_norm": 0.14552171528339386, |
|
"learning_rate": 0.0009982642672195091, |
|
"loss": 1.3422, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.02040816326530612, |
|
"grad_norm": 0.14638382196426392, |
|
"learning_rate": 0.0009981282666278908, |
|
"loss": 1.325, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.020842379504993486, |
|
"grad_norm": 0.1372004598379135, |
|
"learning_rate": 0.0009979871469976197, |
|
"loss": 1.2685, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.02127659574468085, |
|
"grad_norm": 0.12978093326091766, |
|
"learning_rate": 0.0009978409097789176, |
|
"loss": 1.3268, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.021710811984368215, |
|
"grad_norm": 0.13076020777225494, |
|
"learning_rate": 0.0009976895564745992, |
|
"loss": 1.2211, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02214502822405558, |
|
"grad_norm": 0.1264386773109436, |
|
"learning_rate": 0.000997533088640053, |
|
"loss": 1.2597, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.022579244463742945, |
|
"grad_norm": 0.12510421872138977, |
|
"learning_rate": 0.0009973715078832287, |
|
"loss": 1.178, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.02301346070343031, |
|
"grad_norm": 0.13376790285110474, |
|
"learning_rate": 0.0009972048158646183, |
|
"loss": 1.1614, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.02344767694311767, |
|
"grad_norm": 0.11965133249759674, |
|
"learning_rate": 0.00099703301429724, |
|
"loss": 1.1549, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.023881893182805036, |
|
"grad_norm": 0.10872318595647812, |
|
"learning_rate": 0.0009968561049466214, |
|
"loss": 1.143, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.0243161094224924, |
|
"grad_norm": 0.12483104318380356, |
|
"learning_rate": 0.000996674089630779, |
|
"loss": 1.2124, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.024750325662179766, |
|
"grad_norm": 0.10931334644556046, |
|
"learning_rate": 0.0009964869702202021, |
|
"loss": 1.1176, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.02518454190186713, |
|
"grad_norm": 0.12575986981391907, |
|
"learning_rate": 0.0009962947486378325, |
|
"loss": 1.0858, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.025618758141554496, |
|
"grad_norm": 0.11589810997247696, |
|
"learning_rate": 0.0009960974268590438, |
|
"loss": 1.0867, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.026052974381241857, |
|
"grad_norm": 0.10477310419082642, |
|
"learning_rate": 0.000995895006911623, |
|
"loss": 1.1162, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.026487190620929222, |
|
"grad_norm": 0.09671472758054733, |
|
"learning_rate": 0.0009956874908757481, |
|
"loss": 1.0662, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.026921406860616587, |
|
"grad_norm": 0.10130201280117035, |
|
"learning_rate": 0.0009954748808839674, |
|
"loss": 1.0826, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.02735562310030395, |
|
"grad_norm": 0.10421687364578247, |
|
"learning_rate": 0.0009952571791211776, |
|
"loss": 1.0302, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.027789839339991317, |
|
"grad_norm": 0.09363257884979248, |
|
"learning_rate": 0.0009950343878246009, |
|
"loss": 1.0769, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.02822405557967868, |
|
"grad_norm": 0.10000266879796982, |
|
"learning_rate": 0.000994806509283763, |
|
"loss": 1.0048, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.028658271819366043, |
|
"grad_norm": 0.091400146484375, |
|
"learning_rate": 0.0009945735458404682, |
|
"loss": 0.9903, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.029092488059053408, |
|
"grad_norm": 0.09233607351779938, |
|
"learning_rate": 0.0009943354998887763, |
|
"loss": 1.0563, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.029526704298740773, |
|
"grad_norm": 0.0991148054599762, |
|
"learning_rate": 0.0009940923738749779, |
|
"loss": 1.0117, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.029960920538428137, |
|
"grad_norm": 0.08926694095134735, |
|
"learning_rate": 0.0009938441702975688, |
|
"loss": 0.974, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.030395136778115502, |
|
"grad_norm": 0.08934075385332108, |
|
"learning_rate": 0.0009935908917072251, |
|
"loss": 1.0283, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.030829353017802867, |
|
"grad_norm": 0.11343932151794434, |
|
"learning_rate": 0.000993332540706776, |
|
"loss": 0.998, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.03126356925749023, |
|
"grad_norm": 0.11343089491128922, |
|
"learning_rate": 0.0009930691199511775, |
|
"loss": 0.993, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.0316977854971776, |
|
"grad_norm": 0.09531175345182419, |
|
"learning_rate": 0.000992800632147486, |
|
"loss": 1.0179, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.03213200173686496, |
|
"grad_norm": 0.13536381721496582, |
|
"learning_rate": 0.0009925270800548284, |
|
"loss": 0.9539, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.03256621797655232, |
|
"grad_norm": 0.09377475082874298, |
|
"learning_rate": 0.0009922484664843763, |
|
"loss": 0.9811, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03300043421623969, |
|
"grad_norm": 0.09711369127035141, |
|
"learning_rate": 0.0009919647942993148, |
|
"loss": 0.9223, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.03343465045592705, |
|
"grad_norm": 0.11353003978729248, |
|
"learning_rate": 0.0009916760664148148, |
|
"loss": 0.9736, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.03386886669561442, |
|
"grad_norm": 0.09144185483455658, |
|
"learning_rate": 0.0009913822857980019, |
|
"loss": 0.9576, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.03430308293530178, |
|
"grad_norm": 0.08733920753002167, |
|
"learning_rate": 0.0009910834554679266, |
|
"loss": 0.9016, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.03473729917498915, |
|
"grad_norm": 0.08889693766832352, |
|
"learning_rate": 0.0009907795784955327, |
|
"loss": 0.8981, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03517151541467651, |
|
"grad_norm": 0.081461600959301, |
|
"learning_rate": 0.0009904706580036264, |
|
"loss": 0.9246, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.03560573165436387, |
|
"grad_norm": 0.08032266050577164, |
|
"learning_rate": 0.0009901566971668436, |
|
"loss": 0.901, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.03603994789405124, |
|
"grad_norm": 0.08888774365186691, |
|
"learning_rate": 0.0009898376992116178, |
|
"loss": 0.916, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.0364741641337386, |
|
"grad_norm": 0.08942156285047531, |
|
"learning_rate": 0.0009895136674161465, |
|
"loss": 0.9122, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.03690838037342597, |
|
"grad_norm": 0.08162346482276917, |
|
"learning_rate": 0.0009891846051103576, |
|
"loss": 0.8893, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.03734259661311333, |
|
"grad_norm": 0.08491238951683044, |
|
"learning_rate": 0.0009888505156758758, |
|
"loss": 0.8572, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.0377768128528007, |
|
"grad_norm": 0.07837095111608505, |
|
"learning_rate": 0.0009885114025459864, |
|
"loss": 0.851, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.03821102909248806, |
|
"grad_norm": 0.08016610145568848, |
|
"learning_rate": 0.0009881672692056021, |
|
"loss": 0.8985, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.03864524533217542, |
|
"grad_norm": 0.07894952595233917, |
|
"learning_rate": 0.000987818119191225, |
|
"loss": 0.8719, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.03907946157186279, |
|
"grad_norm": 0.07604355365037918, |
|
"learning_rate": 0.0009874639560909118, |
|
"loss": 0.883, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03951367781155015, |
|
"grad_norm": 0.07936527580022812, |
|
"learning_rate": 0.0009871047835442364, |
|
"loss": 0.8385, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.03994789405123752, |
|
"grad_norm": 0.08259500563144684, |
|
"learning_rate": 0.0009867406052422524, |
|
"loss": 0.8862, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.04038211029092488, |
|
"grad_norm": 0.09410865604877472, |
|
"learning_rate": 0.0009863714249274552, |
|
"loss": 0.8654, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.04081632653061224, |
|
"grad_norm": 0.08195285499095917, |
|
"learning_rate": 0.000985997246393744, |
|
"loss": 0.8616, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.04125054277029961, |
|
"grad_norm": 0.07515832781791687, |
|
"learning_rate": 0.000985618073486382, |
|
"loss": 0.858, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04168475900998697, |
|
"grad_norm": 0.07317376881837845, |
|
"learning_rate": 0.0009852339101019574, |
|
"loss": 0.8699, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.04211897524967434, |
|
"grad_norm": 0.088593028485775, |
|
"learning_rate": 0.0009848447601883434, |
|
"loss": 0.8326, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.0425531914893617, |
|
"grad_norm": 0.07599225640296936, |
|
"learning_rate": 0.0009844506277446577, |
|
"loss": 0.7984, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.04298740772904907, |
|
"grad_norm": 0.08955889940261841, |
|
"learning_rate": 0.0009840515168212207, |
|
"loss": 0.8489, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.04342162396873643, |
|
"grad_norm": 0.07896170765161514, |
|
"learning_rate": 0.0009836474315195148, |
|
"loss": 0.8133, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04385584020842379, |
|
"grad_norm": 0.08088196814060211, |
|
"learning_rate": 0.0009832383759921415, |
|
"loss": 0.8356, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.04429005644811116, |
|
"grad_norm": 0.08699057996273041, |
|
"learning_rate": 0.0009828243544427796, |
|
"loss": 0.8226, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.04472427268779852, |
|
"grad_norm": 0.08965849131345749, |
|
"learning_rate": 0.0009824053711261404, |
|
"loss": 0.8137, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.04515848892748589, |
|
"grad_norm": 0.09515663236379623, |
|
"learning_rate": 0.0009819814303479266, |
|
"loss": 0.7937, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.04559270516717325, |
|
"grad_norm": 0.06896204501390457, |
|
"learning_rate": 0.0009815525364647853, |
|
"loss": 0.8056, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.04602692140686062, |
|
"grad_norm": 0.08081001043319702, |
|
"learning_rate": 0.0009811186938842645, |
|
"loss": 0.8465, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.04646113764654798, |
|
"grad_norm": 0.07806787639856339, |
|
"learning_rate": 0.000980679907064768, |
|
"loss": 0.7897, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.04689535388623534, |
|
"grad_norm": 0.07646839320659637, |
|
"learning_rate": 0.0009802361805155097, |
|
"loss": 0.7929, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.04732957012592271, |
|
"grad_norm": 0.08139129728078842, |
|
"learning_rate": 0.000979787518796466, |
|
"loss": 0.8097, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.04776378636561007, |
|
"grad_norm": 0.07595842331647873, |
|
"learning_rate": 0.0009793339265183304, |
|
"loss": 0.8028, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04819800260529744, |
|
"grad_norm": 0.07882706820964813, |
|
"learning_rate": 0.0009788754083424652, |
|
"loss": 0.7825, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.0486322188449848, |
|
"grad_norm": 0.09278842061758041, |
|
"learning_rate": 0.0009784119689808543, |
|
"loss": 0.7897, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.049066435084672164, |
|
"grad_norm": 0.07860872894525528, |
|
"learning_rate": 0.0009779436131960543, |
|
"loss": 0.8193, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.04950065132435953, |
|
"grad_norm": 0.09767181426286697, |
|
"learning_rate": 0.0009774703458011453, |
|
"loss": 0.7653, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.04993486756404689, |
|
"grad_norm": 0.09374384582042694, |
|
"learning_rate": 0.000976992171659682, |
|
"loss": 0.7541, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.05036908380373426, |
|
"grad_norm": 0.07632128894329071, |
|
"learning_rate": 0.0009765090956856436, |
|
"loss": 0.7518, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.05080330004342162, |
|
"grad_norm": 0.07544126361608505, |
|
"learning_rate": 0.0009760211228433832, |
|
"loss": 0.8217, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.05123751628310899, |
|
"grad_norm": 0.07379148155450821, |
|
"learning_rate": 0.0009755282581475768, |
|
"loss": 0.7902, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.05167173252279635, |
|
"grad_norm": 0.07656335830688477, |
|
"learning_rate": 0.0009750305066631716, |
|
"loss": 0.7357, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.052105948762483714, |
|
"grad_norm": 0.07699162513017654, |
|
"learning_rate": 0.0009745278735053344, |
|
"loss": 0.7329, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05254016500217108, |
|
"grad_norm": 0.07921908050775528, |
|
"learning_rate": 0.0009740203638393983, |
|
"loss": 0.7368, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.052974381241858444, |
|
"grad_norm": 0.06974276900291443, |
|
"learning_rate": 0.0009735079828808107, |
|
"loss": 0.7593, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.05340859748154581, |
|
"grad_norm": 0.06961283832788467, |
|
"learning_rate": 0.0009729907358950785, |
|
"loss": 0.7899, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.053842813721233174, |
|
"grad_norm": 0.07279238104820251, |
|
"learning_rate": 0.0009724686281977146, |
|
"loss": 0.7455, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.054277029960920535, |
|
"grad_norm": 0.07039442658424377, |
|
"learning_rate": 0.0009719416651541838, |
|
"loss": 0.7755, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0547112462006079, |
|
"grad_norm": 0.07618307322263718, |
|
"learning_rate": 0.0009714098521798464, |
|
"loss": 0.7631, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.055145462440295265, |
|
"grad_norm": 0.06525314599275589, |
|
"learning_rate": 0.0009708731947399038, |
|
"loss": 0.7605, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.05557967867998263, |
|
"grad_norm": 0.07441145926713943, |
|
"learning_rate": 0.0009703316983493413, |
|
"loss": 0.7402, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.056013894919669995, |
|
"grad_norm": 0.06484754383563995, |
|
"learning_rate": 0.0009697853685728721, |
|
"loss": 0.7238, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.05644811115935736, |
|
"grad_norm": 0.07414862513542175, |
|
"learning_rate": 0.0009692342110248802, |
|
"loss": 0.7633, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.056882327399044724, |
|
"grad_norm": 0.07470294088125229, |
|
"learning_rate": 0.0009686782313693621, |
|
"loss": 0.7467, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.057316543638732086, |
|
"grad_norm": 0.07042668759822845, |
|
"learning_rate": 0.0009681174353198686, |
|
"loss": 0.7309, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.057750759878419454, |
|
"grad_norm": 0.07646633684635162, |
|
"learning_rate": 0.0009675518286394472, |
|
"loss": 0.722, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.058184976118106815, |
|
"grad_norm": 0.07386059314012527, |
|
"learning_rate": 0.0009669814171405816, |
|
"loss": 0.7028, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.058619192357794184, |
|
"grad_norm": 0.07046350091695786, |
|
"learning_rate": 0.0009664062066851325, |
|
"loss": 0.711, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.059053408597481545, |
|
"grad_norm": 0.07790421694517136, |
|
"learning_rate": 0.000965826203184277, |
|
"loss": 0.7183, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.05948762483716891, |
|
"grad_norm": 0.08006292581558228, |
|
"learning_rate": 0.000965241412598449, |
|
"loss": 0.7534, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.059921841076856275, |
|
"grad_norm": 0.06924112141132355, |
|
"learning_rate": 0.0009646518409372759, |
|
"loss": 0.7401, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.060356057316543636, |
|
"grad_norm": 0.07095640152692795, |
|
"learning_rate": 0.0009640574942595195, |
|
"loss": 0.7056, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.060790273556231005, |
|
"grad_norm": 0.0727936401963234, |
|
"learning_rate": 0.0009634583786730109, |
|
"loss": 0.6811, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.061224489795918366, |
|
"grad_norm": 0.0759989321231842, |
|
"learning_rate": 0.0009628545003345899, |
|
"loss": 0.719, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.061658706035605734, |
|
"grad_norm": 0.07433141767978668, |
|
"learning_rate": 0.0009622458654500408, |
|
"loss": 0.724, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.062092922275293096, |
|
"grad_norm": 0.06923756748437881, |
|
"learning_rate": 0.0009616324802740286, |
|
"loss": 0.7562, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.06252713851498046, |
|
"grad_norm": 0.10080388188362122, |
|
"learning_rate": 0.0009610143511100353, |
|
"loss": 0.7116, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.06296135475466783, |
|
"grad_norm": 0.07774697989225388, |
|
"learning_rate": 0.0009603914843102941, |
|
"loss": 0.7277, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0633955709943552, |
|
"grad_norm": 0.07297523319721222, |
|
"learning_rate": 0.0009597638862757254, |
|
"loss": 0.6817, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.06382978723404255, |
|
"grad_norm": 0.07876230776309967, |
|
"learning_rate": 0.0009591315634558697, |
|
"loss": 0.7015, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.06426400347372992, |
|
"grad_norm": 0.07196466624736786, |
|
"learning_rate": 0.0009584945223488226, |
|
"loss": 0.6995, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.06469821971341728, |
|
"grad_norm": 0.07307913899421692, |
|
"learning_rate": 0.0009578527695011669, |
|
"loss": 0.6931, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.06513243595310464, |
|
"grad_norm": 0.0770203247666359, |
|
"learning_rate": 0.0009572063115079062, |
|
"loss": 0.7135, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06556665219279201, |
|
"grad_norm": 0.06902311742305756, |
|
"learning_rate": 0.0009565551550123967, |
|
"loss": 0.6906, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.06600086843247938, |
|
"grad_norm": 0.10059111565351486, |
|
"learning_rate": 0.0009558993067062785, |
|
"loss": 0.7237, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.06643508467216674, |
|
"grad_norm": 0.07180771976709366, |
|
"learning_rate": 0.000955238773329408, |
|
"loss": 0.6929, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.0668693009118541, |
|
"grad_norm": 0.07852017134428024, |
|
"learning_rate": 0.0009545735616697875, |
|
"loss": 0.6531, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.06730351715154147, |
|
"grad_norm": 0.07011571526527405, |
|
"learning_rate": 0.0009539036785634961, |
|
"loss": 0.6949, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.06773773339122884, |
|
"grad_norm": 0.07384242117404938, |
|
"learning_rate": 0.000953229130894619, |
|
"loss": 0.688, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.06817194963091619, |
|
"grad_norm": 0.07481442391872406, |
|
"learning_rate": 0.0009525499255951775, |
|
"loss": 0.6969, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.06860616587060356, |
|
"grad_norm": 0.06441578269004822, |
|
"learning_rate": 0.0009518660696450568, |
|
"loss": 0.6478, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.06904038211029093, |
|
"grad_norm": 0.06720245629549026, |
|
"learning_rate": 0.0009511775700719346, |
|
"loss": 0.677, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.0694745983499783, |
|
"grad_norm": 0.08833543956279755, |
|
"learning_rate": 0.0009504844339512095, |
|
"loss": 0.6912, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06990881458966565, |
|
"grad_norm": 0.08247049152851105, |
|
"learning_rate": 0.0009497866684059277, |
|
"loss": 0.6833, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.07034303082935302, |
|
"grad_norm": 0.06711483001708984, |
|
"learning_rate": 0.0009490842806067095, |
|
"loss": 0.6695, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.07077724706904039, |
|
"grad_norm": 0.06512755900621414, |
|
"learning_rate": 0.0009483772777716766, |
|
"loss": 0.695, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.07121146330872774, |
|
"grad_norm": 0.06651882827281952, |
|
"learning_rate": 0.0009476656671663766, |
|
"loss": 0.6608, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.07164567954841511, |
|
"grad_norm": 0.06416794657707214, |
|
"learning_rate": 0.0009469494561037098, |
|
"loss": 0.6918, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07207989578810248, |
|
"grad_norm": 0.06808705627918243, |
|
"learning_rate": 0.000946228651943853, |
|
"loss": 0.6959, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.07251411202778985, |
|
"grad_norm": 0.077076256275177, |
|
"learning_rate": 0.0009455032620941839, |
|
"loss": 0.6737, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.0729483282674772, |
|
"grad_norm": 0.09289199858903885, |
|
"learning_rate": 0.000944773294009206, |
|
"loss": 0.651, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.07338254450716457, |
|
"grad_norm": 0.07365357875823975, |
|
"learning_rate": 0.0009440387551904703, |
|
"loss": 0.6778, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.07381676074685194, |
|
"grad_norm": 0.0635438859462738, |
|
"learning_rate": 0.0009432996531865001, |
|
"loss": 0.7081, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07425097698653929, |
|
"grad_norm": 0.07002374529838562, |
|
"learning_rate": 0.0009425559955927117, |
|
"loss": 0.6505, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.07468519322622666, |
|
"grad_norm": 0.08253403007984161, |
|
"learning_rate": 0.0009418077900513376, |
|
"loss": 0.6752, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.07511940946591403, |
|
"grad_norm": 0.06753169745206833, |
|
"learning_rate": 0.0009410550442513475, |
|
"loss": 0.6695, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.0755536257056014, |
|
"grad_norm": 0.06907963007688522, |
|
"learning_rate": 0.000940297765928369, |
|
"loss": 0.6441, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.07598784194528875, |
|
"grad_norm": 0.0881882980465889, |
|
"learning_rate": 0.0009395359628646086, |
|
"loss": 0.6456, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.07642205818497612, |
|
"grad_norm": 0.06529023498296738, |
|
"learning_rate": 0.0009387696428887715, |
|
"loss": 0.6689, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.07685627442466349, |
|
"grad_norm": 0.06664346903562546, |
|
"learning_rate": 0.0009379988138759809, |
|
"loss": 0.6485, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.07729049066435084, |
|
"grad_norm": 0.06951016932725906, |
|
"learning_rate": 0.0009372234837476979, |
|
"loss": 0.68, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.07772470690403821, |
|
"grad_norm": 0.060999054461717606, |
|
"learning_rate": 0.0009364436604716389, |
|
"loss": 0.6706, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.07815892314372558, |
|
"grad_norm": 0.07197262346744537, |
|
"learning_rate": 0.0009356593520616947, |
|
"loss": 0.6277, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07859313938341293, |
|
"grad_norm": 0.06676187366247177, |
|
"learning_rate": 0.0009348705665778478, |
|
"loss": 0.6492, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.0790273556231003, |
|
"grad_norm": 0.07378092408180237, |
|
"learning_rate": 0.0009340773121260893, |
|
"loss": 0.6375, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.07946157186278767, |
|
"grad_norm": 0.06367610394954681, |
|
"learning_rate": 0.000933279596858336, |
|
"loss": 0.6221, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.07989578810247504, |
|
"grad_norm": 0.0669325664639473, |
|
"learning_rate": 0.0009324774289723468, |
|
"loss": 0.6294, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.08033000434216239, |
|
"grad_norm": 0.06983687728643417, |
|
"learning_rate": 0.0009316708167116377, |
|
"loss": 0.6487, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.08076422058184976, |
|
"grad_norm": 0.06569714099168777, |
|
"learning_rate": 0.0009308597683653976, |
|
"loss": 0.651, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.08119843682153713, |
|
"grad_norm": 0.06825820356607437, |
|
"learning_rate": 0.0009300442922684032, |
|
"loss": 0.6193, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.08163265306122448, |
|
"grad_norm": 0.07318625599145889, |
|
"learning_rate": 0.000929224396800933, |
|
"loss": 0.7024, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.08206686930091185, |
|
"grad_norm": 0.1047971323132515, |
|
"learning_rate": 0.0009284000903886818, |
|
"loss": 0.6637, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.08250108554059922, |
|
"grad_norm": 0.06583527475595474, |
|
"learning_rate": 0.0009275713815026732, |
|
"loss": 0.6419, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.08293530178028659, |
|
"grad_norm": 0.09143144637346268, |
|
"learning_rate": 0.000926738278659173, |
|
"loss": 0.6295, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.08336951801997394, |
|
"grad_norm": 0.06756502389907837, |
|
"learning_rate": 0.0009259007904196022, |
|
"loss": 0.6718, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.08380373425966131, |
|
"grad_norm": 0.06964406371116638, |
|
"learning_rate": 0.000925058925390448, |
|
"loss": 0.6542, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.08423795049934868, |
|
"grad_norm": 0.07476377487182617, |
|
"learning_rate": 0.0009242126922231762, |
|
"loss": 0.6266, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.08467216673903603, |
|
"grad_norm": 0.07210344076156616, |
|
"learning_rate": 0.0009233620996141421, |
|
"loss": 0.6721, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.0851063829787234, |
|
"grad_norm": 0.07828934490680695, |
|
"learning_rate": 0.0009225071563045006, |
|
"loss": 0.6223, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.08554059921841077, |
|
"grad_norm": 0.07536309212446213, |
|
"learning_rate": 0.000921647871080117, |
|
"loss": 0.6563, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.08597481545809814, |
|
"grad_norm": 0.07678169757127762, |
|
"learning_rate": 0.0009207842527714766, |
|
"loss": 0.6646, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.0864090316977855, |
|
"grad_norm": 0.07808982580900192, |
|
"learning_rate": 0.0009199163102535937, |
|
"loss": 0.6497, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.08684324793747286, |
|
"grad_norm": 0.07775306701660156, |
|
"learning_rate": 0.0009190440524459203, |
|
"loss": 0.6083, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08727746417716023, |
|
"grad_norm": 0.06569099426269531, |
|
"learning_rate": 0.0009181674883122553, |
|
"loss": 0.6424, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.08771168041684758, |
|
"grad_norm": 0.08122856914997101, |
|
"learning_rate": 0.0009172866268606513, |
|
"loss": 0.6054, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.08814589665653495, |
|
"grad_norm": 0.08045452833175659, |
|
"learning_rate": 0.0009164014771433226, |
|
"loss": 0.6372, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.08858011289622232, |
|
"grad_norm": 0.0744997188448906, |
|
"learning_rate": 0.000915512048256552, |
|
"loss": 0.6341, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.08901432913590969, |
|
"grad_norm": 0.07247951626777649, |
|
"learning_rate": 0.0009146183493405975, |
|
"loss": 0.5922, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.08944854537559704, |
|
"grad_norm": 0.08652166277170181, |
|
"learning_rate": 0.0009137203895795982, |
|
"loss": 0.6111, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.08988276161528441, |
|
"grad_norm": 0.06636743992567062, |
|
"learning_rate": 0.0009128181782014801, |
|
"loss": 0.6287, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.09031697785497178, |
|
"grad_norm": 0.08643815666437149, |
|
"learning_rate": 0.0009119117244778608, |
|
"loss": 0.6091, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.09075119409465914, |
|
"grad_norm": 0.08439099788665771, |
|
"learning_rate": 0.0009110010377239551, |
|
"loss": 0.6735, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.0911854103343465, |
|
"grad_norm": 0.0863455981016159, |
|
"learning_rate": 0.0009100861272984779, |
|
"loss": 0.6049, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.09161962657403387, |
|
"grad_norm": 0.06976283341646194, |
|
"learning_rate": 0.0009091670026035499, |
|
"loss": 0.5934, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.09205384281372124, |
|
"grad_norm": 0.10158892720937729, |
|
"learning_rate": 0.0009082436730845993, |
|
"loss": 0.6314, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.0924880590534086, |
|
"grad_norm": 0.06954232603311539, |
|
"learning_rate": 0.0009073161482302654, |
|
"loss": 0.6461, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.09292227529309596, |
|
"grad_norm": 0.06838048249483109, |
|
"learning_rate": 0.0009063844375723014, |
|
"loss": 0.5993, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.09335649153278333, |
|
"grad_norm": 0.06874913722276688, |
|
"learning_rate": 0.0009054485506854755, |
|
"loss": 0.6035, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.09379070777247069, |
|
"grad_norm": 0.07478207349777222, |
|
"learning_rate": 0.0009045084971874737, |
|
"loss": 0.6123, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.09422492401215805, |
|
"grad_norm": 0.06408224254846573, |
|
"learning_rate": 0.0009035642867388002, |
|
"loss": 0.6242, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.09465914025184542, |
|
"grad_norm": 0.07433444261550903, |
|
"learning_rate": 0.0009026159290426781, |
|
"loss": 0.6055, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.09509335649153278, |
|
"grad_norm": 0.06954223662614822, |
|
"learning_rate": 0.0009016634338449503, |
|
"loss": 0.6257, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.09552757273122015, |
|
"grad_norm": 0.06509328633546829, |
|
"learning_rate": 0.0009007068109339783, |
|
"loss": 0.5749, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.09596178897090751, |
|
"grad_norm": 0.08184398710727692, |
|
"learning_rate": 0.000899746070140543, |
|
"loss": 0.6166, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.09639600521059488, |
|
"grad_norm": 0.06200498715043068, |
|
"learning_rate": 0.0008987812213377423, |
|
"loss": 0.615, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.09683022145028224, |
|
"grad_norm": 0.06602746993303299, |
|
"learning_rate": 0.0008978122744408905, |
|
"loss": 0.6087, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.0972644376899696, |
|
"grad_norm": 0.0869906097650528, |
|
"learning_rate": 0.0008968392394074163, |
|
"loss": 0.5725, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.09769865392965697, |
|
"grad_norm": 0.064040407538414, |
|
"learning_rate": 0.0008958621262367599, |
|
"loss": 0.6116, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.09813287016934433, |
|
"grad_norm": 0.0657719075679779, |
|
"learning_rate": 0.0008948809449702712, |
|
"loss": 0.6219, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.0985670864090317, |
|
"grad_norm": 0.07608803361654282, |
|
"learning_rate": 0.0008938957056911057, |
|
"loss": 0.6018, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.09900130264871906, |
|
"grad_norm": 0.07249824702739716, |
|
"learning_rate": 0.0008929064185241212, |
|
"loss": 0.6027, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.09943551888840643, |
|
"grad_norm": 0.06748249381780624, |
|
"learning_rate": 0.0008919130936357742, |
|
"loss": 0.6032, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.09986973512809379, |
|
"grad_norm": 0.08020893484354019, |
|
"learning_rate": 0.000890915741234015, |
|
"loss": 0.607, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.10030395136778116, |
|
"grad_norm": 0.0669545903801918, |
|
"learning_rate": 0.0008899143715681822, |
|
"loss": 0.5974, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.10073816760746852, |
|
"grad_norm": 0.06186607852578163, |
|
"learning_rate": 0.0008889089949288987, |
|
"loss": 0.6186, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.10117238384715588, |
|
"grad_norm": 0.06386630982160568, |
|
"learning_rate": 0.0008878996216479651, |
|
"loss": 0.5995, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.10160660008684325, |
|
"grad_norm": 0.06557858735322952, |
|
"learning_rate": 0.0008868862620982534, |
|
"loss": 0.6066, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.10204081632653061, |
|
"grad_norm": 0.06579785794019699, |
|
"learning_rate": 0.0008858689266936008, |
|
"loss": 0.6144, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.10247503256621798, |
|
"grad_norm": 0.06238356605172157, |
|
"learning_rate": 0.0008848476258887031, |
|
"loss": 0.5899, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.10290924880590534, |
|
"grad_norm": 0.061628226190805435, |
|
"learning_rate": 0.0008838223701790055, |
|
"loss": 0.6092, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.1033434650455927, |
|
"grad_norm": 0.06545189768075943, |
|
"learning_rate": 0.0008827931701005973, |
|
"loss": 0.6014, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.10377768128528007, |
|
"grad_norm": 0.056564707309007645, |
|
"learning_rate": 0.0008817600362301017, |
|
"loss": 0.5959, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.10421189752496743, |
|
"grad_norm": 0.0715203583240509, |
|
"learning_rate": 0.0008807229791845672, |
|
"loss": 0.5942, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.1046461137646548, |
|
"grad_norm": 0.06504496932029724, |
|
"learning_rate": 0.00087968200962136, |
|
"loss": 0.5652, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.10508033000434217, |
|
"grad_norm": 0.06740335375070572, |
|
"learning_rate": 0.0008786371382380527, |
|
"loss": 0.6159, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.10551454624402953, |
|
"grad_norm": 0.06163683906197548, |
|
"learning_rate": 0.0008775883757723155, |
|
"loss": 0.5913, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.10594876248371689, |
|
"grad_norm": 0.07384572178125381, |
|
"learning_rate": 0.0008765357330018055, |
|
"loss": 0.5829, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.10638297872340426, |
|
"grad_norm": 0.06352236866950989, |
|
"learning_rate": 0.0008754792207440556, |
|
"loss": 0.6069, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.10681719496309162, |
|
"grad_norm": 0.0605890229344368, |
|
"learning_rate": 0.0008744188498563641, |
|
"loss": 0.5685, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.10725141120277898, |
|
"grad_norm": 0.06994660943746567, |
|
"learning_rate": 0.0008733546312356824, |
|
"loss": 0.581, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.10768562744246635, |
|
"grad_norm": 0.08128967881202698, |
|
"learning_rate": 0.0008722865758185036, |
|
"loss": 0.5754, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.10811984368215372, |
|
"grad_norm": 0.06497329473495483, |
|
"learning_rate": 0.0008712146945807493, |
|
"loss": 0.598, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.10855405992184107, |
|
"grad_norm": 0.06091492623090744, |
|
"learning_rate": 0.0008701389985376577, |
|
"loss": 0.5807, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.10898827616152844, |
|
"grad_norm": 0.06555697321891785, |
|
"learning_rate": 0.0008690594987436704, |
|
"loss": 0.5768, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.1094224924012158, |
|
"grad_norm": 0.07119280844926834, |
|
"learning_rate": 0.0008679762062923176, |
|
"loss": 0.6088, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.10985670864090318, |
|
"grad_norm": 0.06837710738182068, |
|
"learning_rate": 0.0008668891323161053, |
|
"loss": 0.5905, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.11029092488059053, |
|
"grad_norm": 0.067451111972332, |
|
"learning_rate": 0.0008657982879864007, |
|
"loss": 0.5974, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.1107251411202779, |
|
"grad_norm": 0.06649015098810196, |
|
"learning_rate": 0.0008647036845133172, |
|
"loss": 0.5684, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.11115935735996527, |
|
"grad_norm": 0.06311678886413574, |
|
"learning_rate": 0.0008636053331455986, |
|
"loss": 0.58, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.11159357359965262, |
|
"grad_norm": 0.06087026745080948, |
|
"learning_rate": 0.0008625032451705052, |
|
"loss": 0.5735, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.11202778983933999, |
|
"grad_norm": 0.061150964349508286, |
|
"learning_rate": 0.0008613974319136957, |
|
"loss": 0.6044, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.11246200607902736, |
|
"grad_norm": 0.06023947522044182, |
|
"learning_rate": 0.0008602879047391126, |
|
"loss": 0.551, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.11289622231871473, |
|
"grad_norm": 0.06919502466917038, |
|
"learning_rate": 0.0008591746750488638, |
|
"loss": 0.5744, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.11333043855840208, |
|
"grad_norm": 0.06585050374269485, |
|
"learning_rate": 0.0008580577542831072, |
|
"loss": 0.6053, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.11376465479808945, |
|
"grad_norm": 0.06967171281576157, |
|
"learning_rate": 0.0008569371539199316, |
|
"loss": 0.5896, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.11419887103777682, |
|
"grad_norm": 0.062438324093818665, |
|
"learning_rate": 0.0008558128854752396, |
|
"loss": 0.585, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.11463308727746417, |
|
"grad_norm": 0.06600722670555115, |
|
"learning_rate": 0.0008546849605026289, |
|
"loss": 0.5809, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.11506730351715154, |
|
"grad_norm": 0.06594278663396835, |
|
"learning_rate": 0.0008535533905932737, |
|
"loss": 0.5854, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.11550151975683891, |
|
"grad_norm": 0.06402723491191864, |
|
"learning_rate": 0.0008524181873758059, |
|
"loss": 0.5612, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.11593573599652628, |
|
"grad_norm": 0.06593722105026245, |
|
"learning_rate": 0.0008512793625161946, |
|
"loss": 0.5825, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.11636995223621363, |
|
"grad_norm": 0.057960718870162964, |
|
"learning_rate": 0.0008501369277176275, |
|
"loss": 0.5972, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.116804168475901, |
|
"grad_norm": 0.06126386299729347, |
|
"learning_rate": 0.0008489908947203897, |
|
"loss": 0.5675, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.11723838471558837, |
|
"grad_norm": 0.06619458645582199, |
|
"learning_rate": 0.0008478412753017432, |
|
"loss": 0.5724, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.11767260095527572, |
|
"grad_norm": 0.05950764939188957, |
|
"learning_rate": 0.0008466880812758065, |
|
"loss": 0.5732, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.11810681719496309, |
|
"grad_norm": 0.0596248060464859, |
|
"learning_rate": 0.0008455313244934324, |
|
"loss": 0.5844, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.11854103343465046, |
|
"grad_norm": 0.06557915359735489, |
|
"learning_rate": 0.0008443710168420865, |
|
"loss": 0.5706, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.11897524967433783, |
|
"grad_norm": 0.06171106547117233, |
|
"learning_rate": 0.0008432071702457253, |
|
"loss": 0.561, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.11940946591402518, |
|
"grad_norm": 0.06009051948785782, |
|
"learning_rate": 0.0008420397966646731, |
|
"loss": 0.5764, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.11984368215371255, |
|
"grad_norm": 0.06843849271535873, |
|
"learning_rate": 0.0008408689080954998, |
|
"loss": 0.5833, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.12027789839339992, |
|
"grad_norm": 0.06061152368783951, |
|
"learning_rate": 0.0008396945165708972, |
|
"loss": 0.5962, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.12071211463308727, |
|
"grad_norm": 0.05997680127620697, |
|
"learning_rate": 0.0008385166341595548, |
|
"loss": 0.5738, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.12114633087277464, |
|
"grad_norm": 0.06045431271195412, |
|
"learning_rate": 0.0008373352729660373, |
|
"loss": 0.5738, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.12158054711246201, |
|
"grad_norm": 0.06007954850792885, |
|
"learning_rate": 0.0008361504451306584, |
|
"loss": 0.5753, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.12201476335214936, |
|
"grad_norm": 0.05806737020611763, |
|
"learning_rate": 0.0008349621628293577, |
|
"loss": 0.5703, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.12244897959183673, |
|
"grad_norm": 0.06177588179707527, |
|
"learning_rate": 0.0008337704382735741, |
|
"loss": 0.5533, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.1228831958315241, |
|
"grad_norm": 0.06487903743982315, |
|
"learning_rate": 0.0008325752837101213, |
|
"loss": 0.5655, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.12331741207121147, |
|
"grad_norm": 0.06531866639852524, |
|
"learning_rate": 0.0008313767114210615, |
|
"loss": 0.5694, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.12375162831089882, |
|
"grad_norm": 0.08110279589891434, |
|
"learning_rate": 0.0008301747337235797, |
|
"loss": 0.5517, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.12418584455058619, |
|
"grad_norm": 0.06367824971675873, |
|
"learning_rate": 0.0008289693629698564, |
|
"loss": 0.5474, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.12462006079027356, |
|
"grad_norm": 0.07917732000350952, |
|
"learning_rate": 0.0008277606115469409, |
|
"loss": 0.5685, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.12505427702996091, |
|
"grad_norm": 0.06585974991321564, |
|
"learning_rate": 0.0008265484918766243, |
|
"loss": 0.5547, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.12548849326964828, |
|
"grad_norm": 0.0627543181180954, |
|
"learning_rate": 0.0008253330164153117, |
|
"loss": 0.5795, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.12592270950933565, |
|
"grad_norm": 0.06639768183231354, |
|
"learning_rate": 0.0008241141976538943, |
|
"loss": 0.5669, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.12635692574902302, |
|
"grad_norm": 0.06217719987034798, |
|
"learning_rate": 0.0008228920481176202, |
|
"loss": 0.5552, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.1267911419887104, |
|
"grad_norm": 0.06806106120347977, |
|
"learning_rate": 0.0008216665803659671, |
|
"loss": 0.5553, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.12722535822839776, |
|
"grad_norm": 0.058773159980773926, |
|
"learning_rate": 0.000820437806992512, |
|
"loss": 0.5678, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.1276595744680851, |
|
"grad_norm": 0.06487017124891281, |
|
"learning_rate": 0.0008192057406248028, |
|
"loss": 0.5493, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.12809379070777246, |
|
"grad_norm": 0.05514706298708916, |
|
"learning_rate": 0.0008179703939242276, |
|
"loss": 0.5834, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.12852800694745983, |
|
"grad_norm": 0.05893951281905174, |
|
"learning_rate": 0.0008167317795858851, |
|
"loss": 0.5668, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.1289622231871472, |
|
"grad_norm": 0.06080016866326332, |
|
"learning_rate": 0.0008154899103384537, |
|
"loss": 0.6112, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.12939643942683457, |
|
"grad_norm": 0.06676195561885834, |
|
"learning_rate": 0.0008142447989440618, |
|
"loss": 0.5897, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.12983065566652194, |
|
"grad_norm": 0.055440496653318405, |
|
"learning_rate": 0.0008129964581981553, |
|
"loss": 0.5199, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.13026487190620928, |
|
"grad_norm": 0.0642077848315239, |
|
"learning_rate": 0.0008117449009293668, |
|
"loss": 0.5688, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13069908814589665, |
|
"grad_norm": 0.056085068732500076, |
|
"learning_rate": 0.0008104901399993836, |
|
"loss": 0.5626, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.13113330438558402, |
|
"grad_norm": 0.07240674644708633, |
|
"learning_rate": 0.0008092321883028157, |
|
"loss": 0.5705, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.13156752062527138, |
|
"grad_norm": 0.06402257084846497, |
|
"learning_rate": 0.0008079710587670633, |
|
"loss": 0.5608, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.13200173686495875, |
|
"grad_norm": 0.05889822542667389, |
|
"learning_rate": 0.0008067067643521834, |
|
"loss": 0.5552, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.13243595310464612, |
|
"grad_norm": 0.07787332683801651, |
|
"learning_rate": 0.0008054393180507572, |
|
"loss": 0.5526, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.1328701693443335, |
|
"grad_norm": 0.06866924464702606, |
|
"learning_rate": 0.0008041687328877566, |
|
"loss": 0.5726, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.13330438558402083, |
|
"grad_norm": 0.06494660675525665, |
|
"learning_rate": 0.00080289502192041, |
|
"loss": 0.5605, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.1337386018237082, |
|
"grad_norm": 0.06333691626787186, |
|
"learning_rate": 0.0008016181982380681, |
|
"loss": 0.5668, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.13417281806339557, |
|
"grad_norm": 0.05925397574901581, |
|
"learning_rate": 0.0008003382749620702, |
|
"loss": 0.539, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.13460703430308293, |
|
"grad_norm": 0.06211424991488457, |
|
"learning_rate": 0.000799055265245608, |
|
"loss": 0.5581, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1350412505427703, |
|
"grad_norm": 0.06610213220119476, |
|
"learning_rate": 0.0007977691822735914, |
|
"loss": 0.5726, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.13547546678245767, |
|
"grad_norm": 0.06418436765670776, |
|
"learning_rate": 0.0007964800392625129, |
|
"loss": 0.5616, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.13590968302214504, |
|
"grad_norm": 0.05993996933102608, |
|
"learning_rate": 0.0007951878494603115, |
|
"loss": 0.5392, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.13634389926183238, |
|
"grad_norm": 0.05776641145348549, |
|
"learning_rate": 0.0007938926261462366, |
|
"loss": 0.5402, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.13677811550151975, |
|
"grad_norm": 0.06915484368801117, |
|
"learning_rate": 0.0007925943826307118, |
|
"loss": 0.5336, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.13721233174120712, |
|
"grad_norm": 0.06394239515066147, |
|
"learning_rate": 0.0007912931322551981, |
|
"loss": 0.5647, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.13764654798089448, |
|
"grad_norm": 0.05997829511761665, |
|
"learning_rate": 0.000789988888392056, |
|
"loss": 0.5588, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.13808076422058185, |
|
"grad_norm": 0.07337518781423569, |
|
"learning_rate": 0.0007886816644444098, |
|
"loss": 0.5825, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.13851498046026922, |
|
"grad_norm": 0.07288537919521332, |
|
"learning_rate": 0.0007873714738460075, |
|
"loss": 0.5615, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.1389491966999566, |
|
"grad_norm": 0.06601582467556, |
|
"learning_rate": 0.0007860583300610849, |
|
"loss": 0.5545, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.13938341293964393, |
|
"grad_norm": 0.06303299218416214, |
|
"learning_rate": 0.000784742246584226, |
|
"loss": 0.5579, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.1398176291793313, |
|
"grad_norm": 0.06565733999013901, |
|
"learning_rate": 0.000783423236940225, |
|
"loss": 0.5728, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.14025184541901867, |
|
"grad_norm": 0.06590873748064041, |
|
"learning_rate": 0.0007821013146839467, |
|
"loss": 0.5552, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.14068606165870604, |
|
"grad_norm": 0.06782017648220062, |
|
"learning_rate": 0.0007807764934001874, |
|
"loss": 0.5299, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.1411202778983934, |
|
"grad_norm": 0.06369118392467499, |
|
"learning_rate": 0.0007794487867035358, |
|
"loss": 0.524, |
|
"step": 325 |
|
}, |
|
{
"epoch": 0.14155449413808077,
"grad_norm": 0.06337650865316391,
"learning_rate": 0.0007781182082382324,
"loss": 0.5802,
"step": 326
},
{
"epoch": 0.14198871037776814,
"grad_norm": 0.07011637836694717,
"learning_rate": 0.0007767847716780296,
"loss": 0.5569,
"step": 327
},
{
"epoch": 0.14242292661745548,
"grad_norm": 0.06176460534334183,
"learning_rate": 0.0007754484907260512,
"loss": 0.5266,
"step": 328
},
{
"epoch": 0.14285714285714285,
"grad_norm": 0.06650613993406296,
"learning_rate": 0.0007741093791146516,
"loss": 0.5628,
"step": 329
},
{
"epoch": 0.14329135909683022,
"grad_norm": 0.07864295691251755,
"learning_rate": 0.0007727674506052743,
"loss": 0.5683,
"step": 330
},
{
"epoch": 0.14372557533651759,
"grad_norm": 0.06307731568813324,
"learning_rate": 0.0007714227189883112,
"loss": 0.5705,
"step": 331
},
{
"epoch": 0.14415979157620495,
"grad_norm": 0.07688986510038376,
"learning_rate": 0.0007700751980829601,
"loss": 0.5714,
"step": 332
},
{
"epoch": 0.14459400781589232,
"grad_norm": 0.06042364984750748,
"learning_rate": 0.0007687249017370832,
"loss": 0.5629,
"step": 333
},
{
"epoch": 0.1450282240555797,
"grad_norm": 0.06250355392694473,
"learning_rate": 0.0007673718438270648,
"loss": 0.5318,
"step": 334
},
{
"epoch": 0.14546244029526703,
"grad_norm": 0.06661161035299301,
"learning_rate": 0.0007660160382576683,
"loss": 0.5261,
"step": 335
},
{
"epoch": 0.1458966565349544,
"grad_norm": 0.07277819514274597,
"learning_rate": 0.0007646574989618937,
"loss": 0.5427,
"step": 336
},
{
"epoch": 0.14633087277464177,
"grad_norm": 0.05938050523400307,
"learning_rate": 0.0007632962399008341,
"loss": 0.5469,
"step": 337
},
{
"epoch": 0.14676508901432914,
"grad_norm": 0.0626835823059082,
"learning_rate": 0.0007619322750635327,
"loss": 0.5321,
"step": 338
},
{
"epoch": 0.1471993052540165,
"grad_norm": 0.06853507459163666,
"learning_rate": 0.0007605656184668384,
"loss": 0.5319,
"step": 339
},
{
"epoch": 0.14763352149370387,
"grad_norm": 0.06294318288564682,
"learning_rate": 0.0007591962841552626,
"loss": 0.5735,
"step": 340
},
{
"epoch": 0.14806773773339124,
"grad_norm": 0.06769460439682007,
"learning_rate": 0.0007578242862008336,
"loss": 0.5734,
"step": 341
},
{
"epoch": 0.14850195397307858,
"grad_norm": 0.06945870816707611,
"learning_rate": 0.0007564496387029531,
"loss": 0.5486,
"step": 342
},
{
"epoch": 0.14893617021276595,
"grad_norm": 0.055839743465185165,
"learning_rate": 0.0007550723557882513,
"loss": 0.5646,
"step": 343
},
{
"epoch": 0.14937038645245332,
"grad_norm": 0.05766444280743599,
"learning_rate": 0.0007536924516104411,
"loss": 0.5671,
"step": 344
},
{
"epoch": 0.1498046026921407,
"grad_norm": 0.05658617615699768,
"learning_rate": 0.000752309940350173,
"loss": 0.5573,
"step": 345
},
{
"epoch": 0.15023881893182806,
"grad_norm": 0.06456328183412552,
"learning_rate": 0.0007509248362148888,
"loss": 0.5413,
"step": 346
},
{
"epoch": 0.15067303517151542,
"grad_norm": 0.05971672758460045,
"learning_rate": 0.000749537153438677,
"loss": 0.528,
"step": 347
},
{
"epoch": 0.1511072514112028,
"grad_norm": 0.059805817902088165,
"learning_rate": 0.0007481469062821251,
"loss": 0.5517,
"step": 348
},
{
"epoch": 0.15154146765089013,
"grad_norm": 0.05796041712164879,
"learning_rate": 0.0007467541090321735,
"loss": 0.5651,
"step": 349
},
{
"epoch": 0.1519756838905775,
"grad_norm": 0.0807691365480423,
"learning_rate": 0.000745358776001969,
"loss": 0.5632,
"step": 350
},
{
"epoch": 0.15240990013026487,
"grad_norm": 0.056193944066762924,
"learning_rate": 0.0007439609215307173,
"loss": 0.5544,
"step": 351
},
{
"epoch": 0.15284411636995224,
"grad_norm": 0.06092338636517525,
"learning_rate": 0.000742560559983536,
"loss": 0.5378,
"step": 352
},
{
"epoch": 0.1532783326096396,
"grad_norm": 0.06231565400958061,
"learning_rate": 0.0007411577057513066,
"loss": 0.5584,
"step": 353
},
{
"epoch": 0.15371254884932697,
"grad_norm": 0.06663519144058228,
"learning_rate": 0.000739752373250527,
"loss": 0.5246,
"step": 354
},
{
"epoch": 0.15414676508901434,
"grad_norm": 0.060423221439123154,
"learning_rate": 0.0007383445769231627,
"loss": 0.5203,
"step": 355
},
{
"epoch": 0.15458098132870168,
"grad_norm": 0.06870024651288986,
"learning_rate": 0.0007369343312364993,
"loss": 0.52,
"step": 356
},
{
"epoch": 0.15501519756838905,
"grad_norm": 0.06956614553928375,
"learning_rate": 0.0007355216506829932,
"loss": 0.5816,
"step": 357
},
{
"epoch": 0.15544941380807642,
"grad_norm": 0.060565389692783356,
"learning_rate": 0.0007341065497801229,
"loss": 0.5508,
"step": 358
},
{
"epoch": 0.1558836300477638,
"grad_norm": 0.06745211035013199,
"learning_rate": 0.0007326890430702396,
"loss": 0.5589,
"step": 359
},
{
"epoch": 0.15631784628745116,
"grad_norm": 0.06009297072887421,
"learning_rate": 0.0007312691451204177,
"loss": 0.5259,
"step": 360
},
{
"epoch": 0.15675206252713852,
"grad_norm": 0.05682894587516785,
"learning_rate": 0.0007298468705223059,
"loss": 0.5689,
"step": 361
},
{
"epoch": 0.15718627876682587,
"grad_norm": 0.06062963977456093,
"learning_rate": 0.0007284222338919758,
"loss": 0.5339,
"step": 362
},
{
"epoch": 0.15762049500651323,
"grad_norm": 0.058002885431051254,
"learning_rate": 0.0007269952498697733,
"loss": 0.5714,
"step": 363
},
{
"epoch": 0.1580547112462006,
"grad_norm": 0.056898582726716995,
"learning_rate": 0.0007255659331201672,
"loss": 0.5811,
"step": 364
},
{
"epoch": 0.15848892748588797,
"grad_norm": 0.06246396526694298,
"learning_rate": 0.0007241342983315984,
"loss": 0.5554,
"step": 365
},
{
"epoch": 0.15892314372557534,
"grad_norm": 0.05486922711133957,
"learning_rate": 0.0007227003602163296,
"loss": 0.5567,
"step": 366
},
{
"epoch": 0.1593573599652627,
"grad_norm": 0.057243555784225464,
"learning_rate": 0.0007212641335102932,
"loss": 0.5193,
"step": 367
},
{
"epoch": 0.15979157620495008,
"grad_norm": 0.05640234053134918,
"learning_rate": 0.0007198256329729411,
"loss": 0.5098,
"step": 368
},
{
"epoch": 0.16022579244463742,
"grad_norm": 0.05819845199584961,
"learning_rate": 0.0007183848733870917,
"loss": 0.5366,
"step": 369
},
{
"epoch": 0.16066000868432478,
"grad_norm": 0.05703158304095268,
"learning_rate": 0.0007169418695587791,
"loss": 0.5374,
"step": 370
},
{
"epoch": 0.16109422492401215,
"grad_norm": 0.05363311246037483,
"learning_rate": 0.0007154966363171003,
"loss": 0.555,
"step": 371
},
{
"epoch": 0.16152844116369952,
"grad_norm": 0.060146309435367584,
"learning_rate": 0.0007140491885140629,
"loss": 0.5395,
"step": 372
},
{
"epoch": 0.1619626574033869,
"grad_norm": 0.05488353967666626,
"learning_rate": 0.0007125995410244324,
"loss": 0.5117,
"step": 373
},
{
"epoch": 0.16239687364307426,
"grad_norm": 0.05473377928137779,
"learning_rate": 0.00071114770874558,
"loss": 0.5463,
"step": 374
},
{
"epoch": 0.16283108988276163,
"grad_norm": 0.05373527109622955,
"learning_rate": 0.0007096937065973285,
"loss": 0.522,
"step": 375
},
{
"epoch": 0.16326530612244897,
"grad_norm": 0.054411571472883224,
"learning_rate": 0.0007082375495217995,
"loss": 0.519,
"step": 376
},
{
"epoch": 0.16369952236213633,
"grad_norm": 0.059973325580358505,
"learning_rate": 0.0007067792524832604,
"loss": 0.5546,
"step": 377
},
{
"epoch": 0.1641337386018237,
"grad_norm": 0.07386507093906403,
"learning_rate": 0.000705318830467969,
"loss": 0.548,
"step": 378
},
{
"epoch": 0.16456795484151107,
"grad_norm": 0.05318816378712654,
"learning_rate": 0.0007038562984840216,
"loss": 0.5325,
"step": 379
},
{
"epoch": 0.16500217108119844,
"grad_norm": 0.06362178921699524,
"learning_rate": 0.0007023916715611969,
"loss": 0.5537,
"step": 380
},
{
"epoch": 0.1654363873208858,
"grad_norm": 0.05186508968472481,
"learning_rate": 0.0007009249647508028,
"loss": 0.5534,
"step": 381
},
{
"epoch": 0.16587060356057318,
"grad_norm": 0.05456273630261421,
"learning_rate": 0.0006994561931255209,
"loss": 0.533,
"step": 382
},
{
"epoch": 0.16630481980026052,
"grad_norm": 0.07069144397974014,
"learning_rate": 0.0006979853717792523,
"loss": 0.5591,
"step": 383
},
{
"epoch": 0.16673903603994789,
"grad_norm": 0.13737140595912933,
"learning_rate": 0.0006965125158269618,
"loss": 0.8425,
"step": 384
},
{
"epoch": 0.16717325227963525,
"grad_norm": 0.08646462857723236,
"learning_rate": 0.0006950376404045235,
"loss": 0.5356,
"step": 385
},
{
"epoch": 0.16760746851932262,
"grad_norm": 0.059485238045454025,
"learning_rate": 0.0006935607606685641,
"loss": 0.571,
"step": 386
},
{
"epoch": 0.16804168475901,
"grad_norm": 0.07368376851081848,
"learning_rate": 0.000692081891796308,
"loss": 0.5346,
"step": 387
},
{
"epoch": 0.16847590099869736,
"grad_norm": 0.0615500770509243,
"learning_rate": 0.0006906010489854209,
"loss": 0.5257,
"step": 388
},
{
"epoch": 0.16891011723838473,
"grad_norm": 0.064671091735363,
"learning_rate": 0.0006891182474538539,
"loss": 0.5492,
"step": 389
},
{
"epoch": 0.16934433347807207,
"grad_norm": 0.06681560724973679,
"learning_rate": 0.0006876335024396872,
"loss": 0.5595,
"step": 390
},
{
"epoch": 0.16977854971775944,
"grad_norm": 0.0575130432844162,
"learning_rate": 0.0006861468292009726,
"loss": 0.5895,
"step": 391
},
{
"epoch": 0.1702127659574468,
"grad_norm": 0.05460560694336891,
"learning_rate": 0.0006846582430155782,
"loss": 0.5374,
"step": 392
},
{
"epoch": 0.17064698219713417,
"grad_norm": 0.06030447408556938,
"learning_rate": 0.0006831677591810301,
"loss": 0.6073,
"step": 393
},
{
"epoch": 0.17108119843682154,
"grad_norm": 0.0582847036421299,
"learning_rate": 0.0006816753930143557,
"loss": 0.5347,
"step": 394
},
{
"epoch": 0.1715154146765089,
"grad_norm": 0.05572697892785072,
"learning_rate": 0.0006801811598519267,
"loss": 0.5042,
"step": 395
},
{
"epoch": 0.17194963091619628,
"grad_norm": 0.05724466219544411,
"learning_rate": 0.0006786850750493006,
"loss": 0.5613,
"step": 396
},
{
"epoch": 0.17238384715588362,
"grad_norm": 0.06665828824043274,
"learning_rate": 0.0006771871539810632,
"loss": 0.5841,
"step": 397
},
{
"epoch": 0.172818063395571,
"grad_norm": 0.05465397238731384,
"learning_rate": 0.0006756874120406714,
"loss": 0.5636,
"step": 398
},
{
"epoch": 0.17325227963525835,
"grad_norm": 0.07157056033611298,
"learning_rate": 0.000674185864640294,
"loss": 0.561,
"step": 399
},
{
"epoch": 0.17368649587494572,
"grad_norm": 0.05561946704983711,
"learning_rate": 0.0006726825272106538,
"loss": 0.5351,
"step": 400
},
{
"epoch": 0.1741207121146331,
"grad_norm": 0.05475137010216713,
"learning_rate": 0.0006711774152008689,
"loss": 0.5445,
"step": 401
},
{
"epoch": 0.17455492835432046,
"grad_norm": 0.07090818136930466,
"learning_rate": 0.0006696705440782938,
"loss": 0.5279,
"step": 402
},
{
"epoch": 0.17498914459400783,
"grad_norm": 0.051751721650362015,
"learning_rate": 0.0006681619293283609,
"loss": 0.4934,
"step": 403
},
{
"epoch": 0.17542336083369517,
"grad_norm": 0.06089828908443451,
"learning_rate": 0.0006666515864544209,
"loss": 0.5413,
"step": 404
},
{
"epoch": 0.17585757707338254,
"grad_norm": 0.052899301052093506,
"learning_rate": 0.0006651395309775837,
"loss": 0.5187,
"step": 405
},
{
"epoch": 0.1762917933130699,
"grad_norm": 0.05137433856725693,
"learning_rate": 0.0006636257784365584,
"loss": 0.5636,
"step": 406
},
{
"epoch": 0.17672600955275727,
"grad_norm": 0.060585830360651016,
"learning_rate": 0.0006621103443874949,
"loss": 0.5311,
"step": 407
},
{
"epoch": 0.17716022579244464,
"grad_norm": 0.052542008459568024,
"learning_rate": 0.0006605932444038228,
"loss": 0.5344,
"step": 408
},
{
"epoch": 0.177594442032132,
"grad_norm": 0.052282754331827164,
"learning_rate": 0.0006590744940760914,
"loss": 0.5616,
"step": 409
},
{
"epoch": 0.17802865827181938,
"grad_norm": 0.055943962186574936,
"learning_rate": 0.0006575541090118104,
"loss": 0.5657,
"step": 410
},
{
"epoch": 0.17846287451150672,
"grad_norm": 0.05422157794237137,
"learning_rate": 0.0006560321048352886,
"loss": 0.5482,
"step": 411
},
{
"epoch": 0.1788970907511941,
"grad_norm": 0.053238000720739365,
"learning_rate": 0.0006545084971874737,
"loss": 0.5477,
"step": 412
},
{
"epoch": 0.17933130699088146,
"grad_norm": 0.05666307359933853,
"learning_rate": 0.0006529833017257919,
"loss": 0.5593,
"step": 413
},
{
"epoch": 0.17976552323056882,
"grad_norm": 0.05702383071184158,
"learning_rate": 0.000651456534123986,
"loss": 0.5229,
"step": 414
},
{
"epoch": 0.1801997394702562,
"grad_norm": 0.05041234940290451,
"learning_rate": 0.0006499282100719558,
"loss": 0.5197,
"step": 415
},
{
"epoch": 0.18063395570994356,
"grad_norm": 0.05691606178879738,
"learning_rate": 0.0006483983452755952,
"loss": 0.5282,
"step": 416
},
{
"epoch": 0.18106817194963093,
"grad_norm": 0.05236973240971565,
"learning_rate": 0.0006468669554566324,
"loss": 0.574,
"step": 417
},
{
"epoch": 0.18150238818931827,
"grad_norm": 0.05194111168384552,
"learning_rate": 0.0006453340563524669,
"loss": 0.521,
"step": 418
},
{
"epoch": 0.18193660442900564,
"grad_norm": 0.05307863652706146,
"learning_rate": 0.0006437996637160086,
"loss": 0.5474,
"step": 419
},
{
"epoch": 0.182370820668693,
"grad_norm": 0.05186399817466736,
"learning_rate": 0.0006422637933155162,
"loss": 0.5584,
"step": 420
},
{
"epoch": 0.18280503690838038,
"grad_norm": 0.053380340337753296,
"learning_rate": 0.0006407264609344343,
"loss": 0.5309,
"step": 421
},
{
"epoch": 0.18323925314806774,
"grad_norm": 0.05926523730158806,
"learning_rate": 0.0006391876823712317,
"loss": 0.5362,
"step": 422
},
{
"epoch": 0.1836734693877551,
"grad_norm": 0.053086958825588226,
"learning_rate": 0.0006376474734392387,
"loss": 0.5345,
"step": 423
},
{
"epoch": 0.18410768562744248,
"grad_norm": 0.05506280064582825,
"learning_rate": 0.0006361058499664855,
"loss": 0.5301,
"step": 424
},
{
"epoch": 0.18454190186712982,
"grad_norm": 0.0567692294716835,
"learning_rate": 0.0006345628277955385,
"loss": 0.5336,
"step": 425
},
{
"epoch": 0.1849761181068172,
"grad_norm": 0.04989070072770119,
"learning_rate": 0.0006330184227833376,
"loss": 0.53,
"step": 426
},
{
"epoch": 0.18541033434650456,
"grad_norm": 0.05129670351743698,
"learning_rate": 0.000631472650801034,
"loss": 0.523,
"step": 427
},
{
"epoch": 0.18584455058619193,
"grad_norm": 0.052287109196186066,
"learning_rate": 0.0006299255277338265,
"loss": 0.5309,
"step": 428
},
{
"epoch": 0.1862787668258793,
"grad_norm": 0.051617227494716644,
"learning_rate": 0.0006283770694807982,
"loss": 0.5406,
"step": 429
},
{
"epoch": 0.18671298306556666,
"grad_norm": 0.06112902611494064,
"learning_rate": 0.0006268272919547536,
"loss": 0.5148,
"step": 430
},
{
"epoch": 0.187147199305254,
"grad_norm": 0.04904035106301308,
"learning_rate": 0.0006252762110820547,
"loss": 0.5277,
"step": 431
},
{
"epoch": 0.18758141554494137,
"grad_norm": 0.05905110388994217,
"learning_rate": 0.0006237238428024572,
"loss": 0.5148,
"step": 432
},
{
"epoch": 0.18801563178462874,
"grad_norm": 0.05047300457954407,
"learning_rate": 0.000622170203068947,
"loss": 0.5119,
"step": 433
},
{
"epoch": 0.1884498480243161,
"grad_norm": 0.0518287792801857,
"learning_rate": 0.0006206153078475762,
"loss": 0.5276,
"step": 434
},
{
"epoch": 0.18888406426400348,
"grad_norm": 0.04822736606001854,
"learning_rate": 0.0006190591731172991,
"loss": 0.5347,
"step": 435
},
{
"epoch": 0.18931828050369084,
"grad_norm": 0.052807264029979706,
"learning_rate": 0.0006175018148698076,
"loss": 0.5343,
"step": 436
},
{
"epoch": 0.1897524967433782,
"grad_norm": 0.054650746285915375,
"learning_rate": 0.0006159432491093672,
"loss": 0.5079,
"step": 437
},
{
"epoch": 0.19018671298306555,
"grad_norm": 0.047450825572013855,
"learning_rate": 0.0006143834918526527,
"loss": 0.5532,
"step": 438
},
{
"epoch": 0.19062092922275292,
"grad_norm": 0.047222550958395004,
"learning_rate": 0.0006128225591285831,
"loss": 0.5376,
"step": 439
},
{
"epoch": 0.1910551454624403,
"grad_norm": 0.05076510086655617,
"learning_rate": 0.0006112604669781572,
"loss": 0.5152,
"step": 440
},
{
"epoch": 0.19148936170212766,
"grad_norm": 0.06690964847803116,
"learning_rate": 0.0006096972314542889,
"loss": 0.5395,
"step": 441
},
{
"epoch": 0.19192357794181503,
"grad_norm": 0.049380868673324585,
"learning_rate": 0.0006081328686216418,
"loss": 0.4996,
"step": 442
},
{
"epoch": 0.1923577941815024,
"grad_norm": 0.06767404824495316,
"learning_rate": 0.0006065673945564642,
"loss": 0.5217,
"step": 443
},
{
"epoch": 0.19279201042118976,
"grad_norm": 0.054420776665210724,
"learning_rate": 0.0006050008253464246,
"loss": 0.5587,
"step": 444
},
{
"epoch": 0.1932262266608771,
"grad_norm": 0.0557849258184433,
"learning_rate": 0.0006034331770904454,
"loss": 0.5441,
"step": 445
},
{
"epoch": 0.19366044290056447,
"grad_norm": 0.06514862924814224,
"learning_rate": 0.0006018644658985379,
"loss": 0.5666,
"step": 446
},
{
"epoch": 0.19409465914025184,
"grad_norm": 0.061305928975343704,
"learning_rate": 0.0006002947078916364,
"loss": 0.5576,
"step": 447
},
{
"epoch": 0.1945288753799392,
"grad_norm": 0.05214373767375946,
"learning_rate": 0.0005987239192014335,
"loss": 0.5424,
"step": 448
},
{
"epoch": 0.19496309161962658,
"grad_norm": 0.05650056526064873,
"learning_rate": 0.0005971521159702136,
"loss": 0.5361,
"step": 449
},
{
"epoch": 0.19539730785931395,
"grad_norm": 0.05745421350002289,
"learning_rate": 0.0005955793143506863,
"loss": 0.5273,
"step": 450
},
{
"epoch": 0.19583152409900131,
"grad_norm": 0.05364866554737091,
"learning_rate": 0.0005940055305058219,
"loss": 0.5026,
"step": 451
},
{
"epoch": 0.19626574033868865,
"grad_norm": 0.047814685851335526,
"learning_rate": 0.0005924307806086844,
"loss": 0.5439,
"step": 452
},
{
"epoch": 0.19669995657837602,
"grad_norm": 0.051681190729141235,
"learning_rate": 0.0005908550808422655,
"loss": 0.5271,
"step": 453
},
{
"epoch": 0.1971341728180634,
"grad_norm": 0.05405402183532715,
"learning_rate": 0.0005892784473993184,
"loss": 0.4996,
"step": 454
},
{
"epoch": 0.19756838905775076,
"grad_norm": 0.04617345333099365,
"learning_rate": 0.0005877008964821908,
"loss": 0.5409,
"step": 455
},
{
"epoch": 0.19800260529743813,
"grad_norm": 0.05039280652999878,
"learning_rate": 0.0005861224443026595,
"loss": 0.5433,
"step": 456
},
{
"epoch": 0.1984368215371255,
"grad_norm": 0.05184314772486687,
"learning_rate": 0.0005845431070817626,
"loss": 0.5422,
"step": 457
},
{
"epoch": 0.19887103777681286,
"grad_norm": 0.056363269686698914,
"learning_rate": 0.000582962901049634,
"loss": 0.5202,
"step": 458
},
{
"epoch": 0.1993052540165002,
"grad_norm": 0.04747706279158592,
"learning_rate": 0.0005813818424453351,
"loss": 0.5287,
"step": 459
},
{
"epoch": 0.19973947025618757,
"grad_norm": 0.05112398415803909,
"learning_rate": 0.0005797999475166897,
"loss": 0.563,
"step": 460
},
{
"epoch": 0.20017368649587494,
"grad_norm": 0.050983451306819916,
"learning_rate": 0.0005782172325201155,
"loss": 0.5309,
"step": 461
},
{
"epoch": 0.2006079027355623,
"grad_norm": 0.05590641498565674,
"learning_rate": 0.000576633713720458,
"loss": 0.54,
"step": 462
},
{
"epoch": 0.20104211897524968,
"grad_norm": 0.04705791547894478,
"learning_rate": 0.000575049407390823,
"loss": 0.5291,
"step": 463
},
{
"epoch": 0.20147633521493705,
"grad_norm": 0.05234816297888756,
"learning_rate": 0.000573464329812409,
"loss": 0.5331,
"step": 464
},
{
"epoch": 0.20191055145462442,
"grad_norm": 0.04806197062134743,
"learning_rate": 0.0005718784972743409,
"loss": 0.5141,
"step": 465
},
{
"epoch": 0.20234476769431176,
"grad_norm": 0.0483107753098011,
"learning_rate": 0.0005702919260735014,
"loss": 0.5066,
"step": 466
},
{
"epoch": 0.20277898393399912,
"grad_norm": 0.05739190801978111,
"learning_rate": 0.0005687046325143647,
"loss": 0.5282,
"step": 467
},
{
"epoch": 0.2032132001736865,
"grad_norm": 0.04906076937913895,
"learning_rate": 0.0005671166329088278,
"loss": 0.5257,
"step": 468
},
{
"epoch": 0.20364741641337386,
"grad_norm": 0.04784254729747772,
"learning_rate": 0.0005655279435760435,
"loss": 0.5193,
"step": 469
},
{
"epoch": 0.20408163265306123,
"grad_norm": 0.06473550200462341,
"learning_rate": 0.000563938580842253,
"loss": 0.5257,
"step": 470
},
{
"epoch": 0.2045158488927486,
"grad_norm": 0.047775350511074066,
"learning_rate": 0.0005623485610406173,
"loss": 0.5008,
"step": 471
},
{
"epoch": 0.20495006513243597,
"grad_norm": 0.05449296161532402,
"learning_rate": 0.0005607579005110502,
"loss": 0.5316,
"step": 472
},
{
"epoch": 0.2053842813721233,
"grad_norm": 0.05296491086483002,
"learning_rate": 0.0005591666156000494,
"loss": 0.5493,
"step": 473
},
{
"epoch": 0.20581849761181067,
"grad_norm": 0.045328423380851746,
"learning_rate": 0.0005575747226605297,
"loss": 0.508,
"step": 474
},
{
"epoch": 0.20625271385149804,
"grad_norm": 0.05045296251773834,
"learning_rate": 0.0005559822380516539,
"loss": 0.5118,
"step": 475
},
{
"epoch": 0.2066869300911854,
"grad_norm": 0.04571797326207161,
"learning_rate": 0.0005543891781386656,
"loss": 0.5168,
"step": 476
},
{
"epoch": 0.20712114633087278,
"grad_norm": 0.05250009894371033,
"learning_rate": 0.0005527955592927197,
"loss": 0.5175,
"step": 477
},
{
"epoch": 0.20755536257056015,
"grad_norm": 0.048049163073301315,
"learning_rate": 0.0005512013978907157,
"loss": 0.5279,
"step": 478
},
{
"epoch": 0.20798957881024752,
"grad_norm": 0.04621399939060211,
"learning_rate": 0.0005496067103151288,
"loss": 0.5304,
"step": 479
},
{
"epoch": 0.20842379504993486,
"grad_norm": 0.04643552750349045,
"learning_rate": 0.0005480115129538409,
"loss": 0.5137,
"step": 480
},
{
"epoch": 0.20885801128962223,
"grad_norm": 0.05422172695398331,
"learning_rate": 0.0005464158221999731,
"loss": 0.525,
"step": 481
},
{
"epoch": 0.2092922275293096,
"grad_norm": 0.04593772441148758,
"learning_rate": 0.0005448196544517168,
"loss": 0.5229,
"step": 482
},
{
"epoch": 0.20972644376899696,
"grad_norm": 0.05278801918029785,
"learning_rate": 0.0005432230261121651,
"loss": 0.5256,
"step": 483
},
{
"epoch": 0.21016066000868433,
"grad_norm": 0.04598787799477577,
"learning_rate": 0.0005416259535891447,
"loss": 0.5275,
"step": 484
},
{
"epoch": 0.2105948762483717,
"grad_norm": 0.054117828607559204,
"learning_rate": 0.0005400284532950467,
"loss": 0.5179,
"step": 485
},
{
"epoch": 0.21102909248805907,
"grad_norm": 0.046420540660619736,
"learning_rate": 0.0005384305416466584,
"loss": 0.5399,
"step": 486
},
{
"epoch": 0.2114633087277464,
"grad_norm": 0.048214301466941833,
"learning_rate": 0.0005368322350649942,
"loss": 0.5202,
"step": 487
},
{
"epoch": 0.21189752496743378,
"grad_norm": 0.05419298633933067,
"learning_rate": 0.0005352335499751269,
"loss": 0.4925,
"step": 488
},
{
"epoch": 0.21233174120712114,
"grad_norm": 0.04759565740823746,
"learning_rate": 0.0005336345028060199,
"loss": 0.5162,
"step": 489
},
{
"epoch": 0.2127659574468085,
"grad_norm": 0.05174829810857773,
"learning_rate": 0.0005320351099903565,
"loss": 0.5043,
"step": 490
},
{
"epoch": 0.21320017368649588,
"grad_norm": 0.06595347821712494,
"learning_rate": 0.0005304353879643726,
"loss": 0.5029,
"step": 491
},
{
"epoch": 0.21363438992618325,
"grad_norm": 0.04908424988389015,
"learning_rate": 0.0005288353531676872,
"loss": 0.5043,
"step": 492
},
{
"epoch": 0.21406860616587062,
"grad_norm": 0.05055024474859238,
"learning_rate": 0.0005272350220431334,
"loss": 0.5192,
"step": 493
},
{
"epoch": 0.21450282240555796,
"grad_norm": 0.049286942929029465,
"learning_rate": 0.0005256344110365896,
"loss": 0.5041,
"step": 494
},
{
"epoch": 0.21493703864524533,
"grad_norm": 0.04700294882059097,
"learning_rate": 0.0005240335365968104,
"loss": 0.5258,
"step": 495
},
{
"epoch": 0.2153712548849327,
"grad_norm": 0.0560920424759388,
"learning_rate": 0.0005224324151752575,
"loss": 0.5388,
"step": 496
},
{
"epoch": 0.21580547112462006,
"grad_norm": 0.05053974315524101,
"learning_rate": 0.0005208310632259308,
"loss": 0.5045,
"step": 497
},
{
"epoch": 0.21623968736430743,
"grad_norm": 0.06237909570336342,
"learning_rate": 0.0005192294972051992,
"loss": 0.463,
"step": 498
},
{
"epoch": 0.2166739036039948,
"grad_norm": 0.05128193646669388,
"learning_rate": 0.0005176277335716317,
"loss": 0.5209,
"step": 499
},
{
"epoch": 0.21710811984368214,
"grad_norm": 0.04433543235063553,
"learning_rate": 0.0005160257887858277,
"loss": 0.5046,
"step": 500
},
{
"epoch": 0.2175423360833695,
"grad_norm": 0.05125072970986366,
"learning_rate": 0.0005144236793102484,
"loss": 0.4894,
"step": 501
},
{
"epoch": 0.21797655232305688,
"grad_norm": 0.04465992748737335,
"learning_rate": 0.0005128214216090478,
"loss": 0.4987,
"step": 502
},
{
"epoch": 0.21841076856274425,
"grad_norm": 0.04684034362435341,
"learning_rate": 0.0005112190321479025,
"loss": 0.5498,
"step": 503
},
{
"epoch": 0.2188449848024316,
"grad_norm": 0.04876565933227539,
"learning_rate": 0.0005096165273938436,
"loss": 0.5071,
"step": 504
},
{
"epoch": 0.21927920104211898,
"grad_norm": 0.04982059448957443,
"learning_rate": 0.0005080139238150869,
"loss": 0.5084,
"step": 505
},
{
"epoch": 0.21971341728180635,
"grad_norm": 0.052202560007572174,
"learning_rate": 0.0005064112378808637,
"loss": 0.5233,
"step": 506
},
{
"epoch": 0.2201476335214937,
"grad_norm": 0.04747169837355614,
"learning_rate": 0.0005048084860612516,
"loss": 0.5263,
"step": 507
},
{
"epoch": 0.22058184976118106,
"grad_norm": 0.0462409108877182,
"learning_rate": 0.0005032056848270056,
"loss": 0.5237,
"step": 508
},
{
"epoch": 0.22101606600086843,
"grad_norm": 0.04612501338124275,
"learning_rate": 0.000501602850649388,
"loss": 0.5507,
"step": 509
},
{
"epoch": 0.2214502822405558,
"grad_norm": 0.04581817239522934,
"learning_rate": 0.0005,
"loss": 0.4986,
"step": 510
},
{
"epoch": 0.22188449848024316,
"grad_norm": 0.0477612242102623,
"learning_rate": 0.0004983971493506121,
"loss": 0.51,
"step": 511
},
{
"epoch": 0.22231871471993053,
"grad_norm": 0.05176355689764023,
"learning_rate": 0.0004967943151729944,
"loss": 0.5115,
"step": 512
},
{
"epoch": 0.2227529309596179,
"grad_norm": 0.04731719568371773,
"learning_rate": 0.0004951915139387483,
"loss": 0.5564,
"step": 513
},
{
"epoch": 0.22318714719930524,
"grad_norm": 0.04438960924744606,
"learning_rate": 0.0004935887621191363,
"loss": 0.5502,
"step": 514
},
{
"epoch": 0.2236213634389926,
"grad_norm": 0.06116556003689766,
"learning_rate": 0.0004919860761849132,
"loss": 0.5417,
"step": 515
},
{
"epoch": 0.22405557967867998,
"grad_norm": 0.046353522688150406,
"learning_rate": 0.0004903834726061564,
"loss": 0.5285,
"step": 516
},
{
"epoch": 0.22448979591836735,
"grad_norm": 0.04643326997756958,
"learning_rate": 0.0004887809678520976,
"loss": 0.4774,
"step": 517
},
{
"epoch": 0.22492401215805471,
"grad_norm": 0.0530591681599617,
"learning_rate": 0.0004871785783909523,
"loss": 0.5076,
"step": 518
},
{
"epoch": 0.22535822839774208,
"grad_norm": 0.04789597541093826,
"learning_rate": 0.0004855763206897516,
"loss": 0.4989,
"step": 519
},
{
"epoch": 0.22579244463742945,
"grad_norm": 0.047394026070833206,
"learning_rate": 0.0004839742112141724,
"loss": 0.5069,
"step": 520
},
{
"epoch": 0.2262266608771168,
"grad_norm": 0.044425006955862045,
"learning_rate": 0.0004823722664283684,
"loss": 0.5212,
"step": 521
},
{
"epoch": 0.22666087711680416,
"grad_norm": 0.044534265995025635,
"learning_rate": 0.0004807705027948008,
"loss": 0.5184,
"step": 522
},
{
"epoch": 0.22709509335649153,
"grad_norm": 0.04293319582939148,
"learning_rate": 0.0004791689367740692,
"loss": 0.4807,
"step": 523
},
{
"epoch": 0.2275293095961789,
"grad_norm": 0.04485393315553665,
"learning_rate": 0.0004775675848247427,
"loss": 0.4746,
"step": 524
},
{
"epoch": 0.22796352583586627,
"grad_norm": 0.043829191476106644,
"learning_rate": 0.0004759664634031897,
"loss": 0.4936,
"step": 525
},
{
"epoch": 0.22839774207555363,
"grad_norm": 0.05110118165612221,
"learning_rate": 0.00047436558896341046,
"loss": 0.5849,
"step": 526
},
{
"epoch": 0.228831958315241,
"grad_norm": 0.04783101752400398,
"learning_rate": 0.0004727649779568666,
"loss": 0.5034,
"step": 527
},
{
"epoch": 0.22926617455492834,
"grad_norm": 0.047072965651750565,
"learning_rate": 0.00047116464683231285,
"loss": 0.5155,
"step": 528
},
{
"epoch": 0.2297003907946157,
"grad_norm": 0.0427449531853199,
"learning_rate": 0.0004695646120356274,
"loss": 0.5138,
"step": 529
},
{
"epoch": 0.23013460703430308,
"grad_norm": 0.04664051905274391,
"learning_rate": 0.0004679648900096436,
"loss": 0.4727,
"step": 530
},
{
"epoch": 0.23056882327399045,
"grad_norm": 0.04555191844701767,
"learning_rate": 0.0004663654971939802,
"loss": 0.5386,
"step": 531
},
{
"epoch": 0.23100303951367782,
"grad_norm": 0.05113929882645607,
"learning_rate": 0.00046476645002487297,
"loss": 0.5018,
"step": 532
},
{
"epoch": 0.23143725575336518,
"grad_norm": 0.04245986044406891,
"learning_rate": 0.00046316776493500613,
"loss": 0.4949,
"step": 533
},
{
"epoch": 0.23187147199305255,
"grad_norm": 0.047864172607660294,
"learning_rate": 0.0004615694583533418,
"loss": 0.5032,
"step": 534
},
{
"epoch": 0.2323056882327399,
"grad_norm": 0.06898235529661179,
"learning_rate": 0.0004599715467049534,
"loss": 0.4922,
"step": 535
},
{
"epoch": 0.23273990447242726,
"grad_norm": 0.04230332002043724,
"learning_rate": 0.0004583740464108554,
"loss": 0.5164,
"step": 536
},
{
"epoch": 0.23317412071211463,
"grad_norm": 0.043854959309101105,
"learning_rate": 0.00045677697388783495,
"loss": 0.4894,
"step": 537
},
{
"epoch": 0.233608336951802,
"grad_norm": 0.05595362186431885,
"learning_rate": 0.0004551803455482833,
"loss": 0.5107,
"step": 538
},
{
"epoch": 0.23404255319148937,
"grad_norm": 0.05336504802107811,
"learning_rate": 0.0004535841778000269,
"loss": 0.4845,
"step": 539
},
{
"epoch": 0.23447676943117673,
"grad_norm": 0.052477382123470306,
"learning_rate": 0.0004519884870461591,
"loss": 0.5215,
"step": 540
},
{
"epoch": 0.2349109856708641,
"grad_norm": 0.04965275526046753,
"learning_rate": 0.00045039328968487125,
"loss": 0.499,
"step": 541
},
{
"epoch": 0.23534520191055144,
"grad_norm": 0.07106837630271912,
"learning_rate": 0.00044879860210928434,
"loss": 0.5159,
"step": 542
},
{
"epoch": 0.2357794181502388,
"grad_norm": 0.06256967037916183,
"learning_rate": 0.0004472044407072805,
"loss": 0.505,
"step": 543
},
{
"epoch": 0.23621363438992618,
"grad_norm": 0.04907793179154396,
"learning_rate": 0.0004456108218613346,
"loss": 0.5117,
"step": 544
},
{
"epoch": 0.23664785062961355,
"grad_norm": 0.06353382021188736,
"learning_rate": 0.0004440177619483461,
"loss": 0.4941,
"step": 545
},
{
"epoch": 0.23708206686930092,
"grad_norm": 0.05265835300087929,
"learning_rate": 0.0004424252773394704,
"loss": 0.5179,
"step": 546
},
{
"epoch": 0.23751628310898829,
"grad_norm": 0.05525769665837288,
"learning_rate": 0.0004408333843999506,
"loss": 0.4938,
"step": 547
},
{
"epoch": 0.23795049934867565,
"grad_norm": 0.06166142225265503,
"learning_rate": 0.00043924209948894985,
"loss": 0.5287,
"step": 548
},
{
"epoch": 0.238384715588363,
"grad_norm": 0.04812607169151306,
"learning_rate": 0.0004376514389593826,
"loss": 0.5401,
"step": 549
},
{
"epoch": 0.23881893182805036,
"grad_norm": 0.048272911459207535,
"learning_rate": 0.00043606141915774693,
"loss": 0.5258,
"step": 550
},
{
"epoch": 0.23925314806773773,
"grad_norm": 0.04806293919682503,
"learning_rate": 0.00043447205642395664,
"loss": 0.5023,
"step": 551
},
{
"epoch": 0.2396873643074251,
"grad_norm": 0.04957396164536476,
"learning_rate": 0.0004328833670911724,
"loss": 0.5015,
"step": 552
},
{
"epoch": 0.24012158054711247,
"grad_norm": 0.04413224756717682,
"learning_rate": 0.00043129536748563545,
"loss": 0.5336,
"step": 553
},
{
"epoch": 0.24055579678679984,
"grad_norm": 0.04619096592068672,
"learning_rate": 0.0004297080739264987,
"loss": 0.4832,
"step": 554
},
{
"epoch": 0.2409900130264872,
"grad_norm": 0.05157000198960304,
"learning_rate": 0.0004281215027256592,
"loss": 0.529,
"step": 555
},
{
"epoch": 0.24142422926617454,
"grad_norm": 0.04697950556874275,
"learning_rate": 0.00042653567018759105,
"loss": 0.5034,
"step": 556
},
{
"epoch": 0.2418584455058619,
"grad_norm": 0.04245249181985855,
"learning_rate": 0.0004249505926091771,
"loss": 0.4839,
"step": 557
},
{
"epoch": 0.24229266174554928,
"grad_norm": 0.042621515691280365,
"learning_rate": 0.000423366286279542,
"loss": 0.5639,
"step": 558
},
{
"epoch": 0.24272687798523665,
"grad_norm": 0.054264895617961884,
"learning_rate": 0.0004217827674798845,
"loss": 0.5077,
"step": 559
},
{
"epoch": 0.24316109422492402,
"grad_norm": 0.04171125963330269,
"learning_rate": 0.0004202000524833105,
"loss": 0.5001,
"step": 560
},
{
"epoch": 0.2435953104646114,
"grad_norm": 0.047443173825740814,
"learning_rate": 0.00041861815755466506,
"loss": 0.5153,
"step": 561
},
{
"epoch": 0.24402952670429873,
"grad_norm": 0.04359521344304085,
"learning_rate": 0.0004170370989503662,
"loss": 0.516,
"step": 562
},
{
"epoch": 0.2444637429439861,
"grad_norm": 0.042067334055900574,
"learning_rate": 0.0004154568929182374,
"loss": 0.4783,
"step": 563
},
{
"epoch": 0.24489795918367346,
"grad_norm": 0.042515527456998825,
"learning_rate": 0.00041387755569734057,
"loss": 0.5112,
"step": 564
},
{
"epoch": 0.24533217542336083,
"grad_norm": 0.04880267009139061,
"learning_rate": 0.00041229910351780926,
"loss": 0.5108,
"step": 565
},
{
"epoch": 0.2457663916630482,
"grad_norm": 0.048264361917972565,
"learning_rate": 0.0004107215526006817,
"loss": 0.4802,
"step": 566
},
{
"epoch": 0.24620060790273557,
"grad_norm": 0.0446770153939724,
"learning_rate": 0.00040914491915773453,
"loss": 0.5461,
"step": 567
},
{
"epoch": 0.24663482414242294,
"grad_norm": 0.05128175765275955,
"learning_rate": 0.00040756921939131565,
"loss": 0.5288,
"step": 568
},
{
"epoch": 0.24706904038211028,
"grad_norm": 0.05804765224456787,
"learning_rate": 0.0004059944694941783,
"loss": 0.4981,
"step": 569
},
{
"epoch": 0.24750325662179765,
"grad_norm": 0.04847874119877815,
"learning_rate": 0.00040442068564931397,
"loss": 0.5185,
"step": 570
},
{
"epoch": 0.24793747286148501,
"grad_norm": 0.04028409346938133,
"learning_rate": 0.0004028478840297866,
"loss": 0.493,
"step": 571
},
{
"epoch": 0.24837168910117238,
"grad_norm": 0.04243363067507744,
"learning_rate": 0.00040127608079856646,
"loss": 0.5079,
"step": 572
},
{
"epoch": 0.24880590534085975,
"grad_norm": 0.04515422508120537,
"learning_rate": 0.00039970529210836363,
"loss": 0.5121,
"step": 573
},
{
"epoch": 0.24924012158054712,
"grad_norm": 0.05250799283385277,
"learning_rate": 0.00039813553410146226,
"loss": 0.4975,
"step": 574
},
{
"epoch": 0.2496743378202345,
"grad_norm": 0.043340008705854416,
"learning_rate": 0.00039656682290955457,
"loss": 0.5094,
"step": 575
},
{
"epoch": 0.25010855405992183,
"grad_norm": 0.04954323545098305,
"learning_rate": 0.00039499917465357534,
"loss": 0.5327,
"step": 576
},
{
"epoch": 0.2505427702996092,
"grad_norm": 0.04032951593399048,
"learning_rate": 0.00039343260544353573,
"loss": 0.5035,
"step": 577
},
{
"epoch": 0.25097698653929656,
"grad_norm": 0.04386662319302559,
"learning_rate": 0.0003918671313783583,
"loss": 0.5142,
"step": 578
},
{
"epoch": 0.25141120277898393,
"grad_norm": 0.04662942886352539,
"learning_rate": 0.00039030276854571115,
"loss": 0.4996,
"step": 579
},
{
"epoch": 0.2518454190186713,
"grad_norm": 0.046797946095466614,
"learning_rate": 0.00038873953302184284,
"loss": 0.4728,
"step": 580
},
{
"epoch": 0.25227963525835867,
"grad_norm": 0.041140951216220856,
"learning_rate": 0.000387177440871417,
"loss": 0.546,
"step": 581
},
{
"epoch": 0.25271385149804604,
"grad_norm": 0.04658018797636032,
"learning_rate": 0.00038561650814734736,
"loss": 0.4804,
"step": 582
},
{
"epoch": 0.2531480677377334,
"grad_norm": 0.04261363297700882,
"learning_rate": 0.0003840567508906328,
"loss": 0.4833,
"step": 583
},
{
"epoch": 0.2535822839774208,
"grad_norm": 0.046267736703157425,
"learning_rate": 0.0003824981851301924,
"loss": 0.4809,
"step": 584
},
{
"epoch": 0.25401650021710814,
"grad_norm": 0.04030013084411621,
"learning_rate": 0.0003809408268827009,
"loss": 0.4878,
"step": 585
},
{
"epoch": 0.2544507164567955,
"grad_norm": 0.041512105613946915,
"learning_rate": 0.0003793846921524237,
"loss": 0.5165,
"step": 586
},
{
"epoch": 0.2548849326964828,
"grad_norm": 0.04772355034947395,
"learning_rate": 0.00037782979693105293,
"loss": 0.5193,
"step": 587
},
{
"epoch": 0.2553191489361702,
"grad_norm": 0.040986839681863785,
"learning_rate": 0.00037627615719754295,
"loss": 0.5028,
"step": 588
},
{
"epoch": 0.25575336517585756,
"grad_norm": 0.04139196127653122,
"learning_rate": 0.00037472378891794533,
"loss": 0.4998,
"step": 589
},
{
"epoch": 0.25618758141554493,
"grad_norm": 0.04017603024840355,
"learning_rate": 0.0003731727080452464,
"loss": 0.5239,
"step": 590
},
{
"epoch": 0.2566217976552323,
"grad_norm": 0.04039851576089859,
"learning_rate": 0.00037162293051920184,
"loss": 0.4739,
"step": 591
},
{
"epoch": 0.25705601389491967,
"grad_norm": 0.04436818137764931,
"learning_rate": 0.0003700744722661736,
"loss": 0.5153,
"step": 592
},
{
"epoch": 0.25749023013460703,
"grad_norm": 0.03977528214454651,
"learning_rate": 0.0003685273491989661,
"loss": 0.5217,
"step": 593
},
{
"epoch": 0.2579244463742944,
"grad_norm": 0.038483258336782455,
"learning_rate": 0.0003669815772166625,
"loss": 0.4904,
"step": 594
},
{
"epoch": 0.25835866261398177,
"grad_norm": 0.04520029202103615,
"learning_rate": 0.0003654371722044616,
"loss": 0.5303,
"step": 595
},
{
"epoch": 0.25879287885366914,
"grad_norm": 0.04642047360539436,
"learning_rate": 0.00036389415003351444,
"loss": 0.4783,
"step": 596
},
{
"epoch": 0.2592270950933565,
"grad_norm": 0.037170182913541794,
"learning_rate": 0.00036235252656076133,
"loss": 0.5254,
"step": 597
},
{
"epoch": 0.2596613113330439,
"grad_norm": 0.037870265543460846,
"learning_rate": 0.00036081231762876846,
"loss": 0.4933,
"step": 598
},
{
"epoch": 0.26009552757273124,
"grad_norm": 0.03957228735089302,
"learning_rate": 0.0003592735390655658,
"loss": 0.5194,
"step": 599
},
{
"epoch": 0.26052974381241856,
"grad_norm": 0.04471513256430626,
"learning_rate": 0.0003577362066844838,
"loss": 0.4737,
"step": 600
},
{
"epoch": 0.2609639600521059,
"grad_norm": 0.0370667465031147,
"learning_rate": 0.0003562003362839914,
"loss": 0.5072,
"step": 601
},
{
"epoch": 0.2613981762917933,
"grad_norm": 0.04277738928794861,
"learning_rate": 0.00035466594364753326,
"loss": 0.5093,
"step": 602
},
{
"epoch": 0.26183239253148066,
"grad_norm": 0.04622077941894531,
"learning_rate": 0.00035313304454336763,
"loss": 0.5008,
"step": 603
},
{
"epoch": 0.26226660877116803,
"grad_norm": 0.0380844846367836,
"learning_rate": 0.0003516016547244047,
"loss": 0.4899,
"step": 604
},
{
"epoch": 0.2627008250108554,
"grad_norm": 0.03964554890990257,
"learning_rate": 0.0003500717899280442,
"loss": 0.544,
"step": 605
},
{
"epoch": 0.26313504125054277,
"grad_norm": 0.041507843881845474,
"learning_rate": 0.00034854346587601397,
"loss": 0.5341,
"step": 606
},
{
"epoch": 0.26356925749023014,
"grad_norm": 0.04041410982608795,
"learning_rate": 0.0003470166982742082,
"loss": 0.5325,
"step": 607
},
{
"epoch": 0.2640034737299175,
"grad_norm": 0.03915121778845787,
"learning_rate": 0.00034549150281252633,
"loss": 0.5164,
"step": 608
},
{
"epoch": 0.26443768996960487,
"grad_norm": 0.04141729697585106,
"learning_rate": 0.0003439678951647115,
"loss": 0.4988,
"step": 609
},
{
"epoch": 0.26487190620929224,
"grad_norm": 0.043251313269138336,
"learning_rate": 0.0003424458909881897,
"loss": 0.5319,
"step": 610
},
{
"epoch": 0.2653061224489796,
"grad_norm": 0.045615166425704956,
"learning_rate": 0.00034092550592390857,
"loss": 0.4668,
"step": 611
},
{
"epoch": 0.265740338688667,
"grad_norm": 0.03943486511707306,
"learning_rate": 0.00033940675559617726,
"loss": 0.4991,
"step": 612
},
{
"epoch": 0.26617455492835435,
"grad_norm": 0.0382043793797493,
"learning_rate": 0.000337889655612505,
"loss": 0.5117,
"step": 613
},
{
"epoch": 0.26660877116804166,
"grad_norm": 0.039549313485622406,
"learning_rate": 0.00033637422156344155,
"loss": 0.5029,
"step": 614
},
{
"epoch": 0.267042987407729,
"grad_norm": 0.03972559794783592,
"learning_rate": 0.00033486046902241664,
"loss": 0.5073,
"step": 615
},
{
"epoch": 0.2674772036474164,
"grad_norm": 0.03974078595638275,
"learning_rate": 0.0003333484135455792,
"loss": 0.5083,
"step": 616
},
{
"epoch": 0.26791141988710376,
"grad_norm": 0.039242375642061234,
"learning_rate": 0.00033183807067163916,
"loss": 0.4979,
"step": 617
},
{
"epoch": 0.26834563612679113,
"grad_norm": 0.045767784118652344,
"learning_rate": 0.0003303294559217063,
"loss": 0.4835,
"step": 618
},
{
"epoch": 0.2687798523664785,
"grad_norm": 0.03927744925022125,
"learning_rate": 0.0003288225847991312,
"loss": 0.5103,
"step": 619
},
{
"epoch": 0.26921406860616587,
"grad_norm": 0.03815273195505142,
"learning_rate": 0.0003273174727893463,
"loss": 0.5097,
"step": 620
},
{
"epoch": 0.26964828484585324,
"grad_norm": 0.0421292670071125,
"learning_rate": 0.00032581413535970593,
"loss": 0.4996,
"step": 621
},
{
"epoch": 0.2700825010855406,
"grad_norm": 0.0482180081307888,
"learning_rate": 0.0003243125879593286,
"loss": 0.4785,
"step": 622
},
{
"epoch": 0.270516717325228,
"grad_norm": 0.03935736045241356,
"learning_rate": 0.0003228128460189368,
"loss": 0.5063,
"step": 623
},
{
"epoch": 0.27095093356491534,
"grad_norm": 0.037722837179899216,
"learning_rate": 0.0003213149249506997,
"loss": 0.4979,
"step": 624
},
{
"epoch": 0.2713851498046027,
"grad_norm": 0.03764335811138153,
"learning_rate": 0.0003198188401480734,
"loss": 0.4818,
"step": 625
},
{
"epoch": 0.2718193660442901,
"grad_norm": 0.04136700555682182,
"learning_rate": 0.0003183246069856443,
"loss": 0.5144,
"step": 626
},
{
"epoch": 0.27225358228397745,
"grad_norm": 0.04152214527130127,
"learning_rate": 0.00031683224081897,
"loss": 0.5217,
"step": 627
},
{
"epoch": 0.27268779852366476,
"grad_norm": 0.036686960607767105,
"learning_rate": 0.00031534175698442194,
"loss": 0.4938,
"step": 628
},
{
"epoch": 0.27312201476335213,
"grad_norm": 0.0376245342195034,
"learning_rate": 0.00031385317079902743,
"loss": 0.5255,
"step": 629
},
{
"epoch": 0.2735562310030395,
"grad_norm": 0.04143936559557915,
"learning_rate": 0.000312366497560313,
"loss": 0.5005,
"step": 630
},
{
"epoch": 0.27399044724272686,
"grad_norm": 0.036747369915246964,
"learning_rate": 0.00031088175254614616,
"loss": 0.4621,
"step": 631
},
{
"epoch": 0.27442466348241423,
"grad_norm": 0.039392732083797455,
"learning_rate": 0.00030939895101457916,
"loss": 0.5333,
"step": 632
},
{
"epoch": 0.2748588797221016,
"grad_norm": 0.03783806785941124,
"learning_rate": 0.0003079181082036922,
"loss": 0.5075,
"step": 633
},
{
"epoch": 0.27529309596178897,
"grad_norm": 0.03799246624112129,
"learning_rate": 0.000306439239331436,
"loss": 0.5086,
"step": 634
},
{
"epoch": 0.27572731220147634,
"grad_norm": 0.040941022336483,
"learning_rate": 0.0003049623595954766,
"loss": 0.5125,
"step": 635
},
{
"epoch": 0.2761615284411637,
"grad_norm": 0.045769453048706055,
"learning_rate": 0.0003034874841730382,
"loss": 0.4913,
"step": 636
},
{
"epoch": 0.2765957446808511,
"grad_norm": 0.044418562203645706,
"learning_rate": 0.00030201462822074786,
"loss": 0.5225,
"step": 637
},
{
"epoch": 0.27702996092053844,
"grad_norm": 0.036289479583501816,
"learning_rate": 0.0003005438068744792,
"loss": 0.5016,
"step": 638
},
{
"epoch": 0.2774641771602258,
"grad_norm": 0.03927507996559143,
"learning_rate": 0.0002990750352491973,
"loss": 0.497,
"step": 639
},
{
"epoch": 0.2778983933999132,
"grad_norm": 0.052069611847400665,
"learning_rate": 0.0002976083284388031,
"loss": 0.5026,
"step": 640
},
{
"epoch": 0.27833260963960055,
"grad_norm": 0.0408078134059906,
"learning_rate": 0.00029614370151597835,
"loss": 0.5069,
"step": 641
},
{
"epoch": 0.27876682587928786,
"grad_norm": 0.040615539997816086,
"learning_rate": 0.0002946811695320311,
"loss": 0.4878,
"step": 642
},
{
"epoch": 0.27920104211897523,
"grad_norm": 0.04145561158657074,
"learning_rate": 0.00029322074751673977,
"loss": 0.4547,
"step": 643
},
{
"epoch": 0.2796352583586626,
"grad_norm": 0.044274091720581055,
"learning_rate": 0.00029176245047820063,
"loss": 0.5051,
"step": 644
},
{
"epoch": 0.28006947459834997,
"grad_norm": 0.055839963257312775,
"learning_rate": 0.0002903062934026716,
"loss": 0.495,
"step": 645
},
{
"epoch": 0.28050369083803733,
"grad_norm": 0.038132019340991974,
"learning_rate": 0.0002888522912544202,
"loss": 0.4746,
"step": 646
},
{
"epoch": 0.2809379070777247,
"grad_norm": 0.039124827831983566,
"learning_rate": 0.00028740045897556767,
"loss": 0.4978,
"step": 647
},
{
"epoch": 0.28137212331741207,
"grad_norm": 0.03960909694433212,
"learning_rate": 0.0002859508114859374,
"loss": 0.5043,
"step": 648
},
{
"epoch": 0.28180633955709944,
"grad_norm": 0.03705562651157379,
"learning_rate": 0.00028450336368289974,
"loss": 0.5041,
"step": 649
},
{
"epoch": 0.2822405557967868,
"grad_norm": 0.03920963406562805,
"learning_rate": 0.00028305813044122096,
"loss": 0.5055,
"step": 650
},
{
"epoch": 0.2826747720364742,
"grad_norm": 0.04196755215525627,
"learning_rate": 0.00028161512661290845,
"loss": 0.4962,
"step": 651
},
{
"epoch": 0.28310898827616154,
"grad_norm": 0.03515279293060303,
"learning_rate": 0.00028017436702705903,
"loss": 0.4708,
"step": 652
},
{
"epoch": 0.2835432045158489,
"grad_norm": 0.042391590774059296,
"learning_rate": 0.0002787358664897068,
"loss": 0.5268,
"step": 653
},
{
"epoch": 0.2839774207555363,
"grad_norm": 0.03707970678806305,
"learning_rate": 0.0002772996397836704,
"loss": 0.5252,
"step": 654
},
{
"epoch": 0.28441163699522365,
"grad_norm": 0.039679594337940216,
"learning_rate": 0.0002758657016684015,
"loss": 0.4954,
"step": 655
},
{
"epoch": 0.28484585323491096,
"grad_norm": 0.03491951525211334,
"learning_rate": 0.0002744340668798326,
"loss": 0.4859,
"step": 656
},
{
"epoch": 0.28528006947459833,
"grad_norm": 0.033995699137449265,
"learning_rate": 0.00027300475013022663,
"loss": 0.4917,
"step": 657
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.0408964566886425,
"learning_rate": 0.00027157776610802414,
"loss": 0.5176,
"step": 658
},
{
"epoch": 0.28614850195397307,
"grad_norm": 0.035840053111314774,
"learning_rate": 0.0002701531294776943,
"loss": 0.4757,
"step": 659
},
{
"epoch": 0.28658271819366044,
"grad_norm": 0.03554424270987511,
"learning_rate": 0.0002687308548795825,
"loss": 0.5043,
"step": 660
},
{
"epoch": 0.2870169344333478,
"grad_norm": 0.03549895063042641,
"learning_rate": 0.00026731095692976073,
"loss": 0.4924,
"step": 661
},
{
"epoch": 0.28745115067303517,
"grad_norm": 0.0438251756131649,
"learning_rate": 0.00026589345021987723,
"loss": 0.4986,
"step": 662
},
{
"epoch": 0.28788536691272254,
"grad_norm": 0.037839341908693314,
"learning_rate": 0.00026447834931700686,
"loss": 0.4716,
"step": 663
},
{
"epoch": 0.2883195831524099,
"grad_norm": 0.03682737424969673,
"learning_rate": 0.0002630656687635007,
"loss": 0.4991,
"step": 664
},
{
"epoch": 0.2887537993920973,
"grad_norm": 0.03697109594941139,
"learning_rate": 0.0002616554230768374,
"loss": 0.5104,
"step": 665
},
{
"epoch": 0.28918801563178465,
"grad_norm": 0.036725230515003204,
"learning_rate": 0.0002602476267494731,
"loss": 0.5163,
"step": 666
},
{
"epoch": 0.289622231871472,
"grad_norm": 0.04177143797278404,
"learning_rate": 0.0002588422942486932,
"loss": 0.4567,
"step": 667
},
{
"epoch": 0.2900564481111594,
"grad_norm": 0.04078909382224083,
"learning_rate": 0.0002574394400164639,
"loss": 0.4946,
"step": 668
},
{
"epoch": 0.2904906643508467,
"grad_norm": 0.04146347567439079,
"learning_rate": 0.0002560390784692828,
"loss": 0.4947,
"step": 669
},
{
"epoch": 0.29092488059053406,
"grad_norm": 0.03871840611100197,
"learning_rate": 0.00025464122399803123,
"loss": 0.5238,
"step": 670
},
{
"epoch": 0.29135909683022143,
"grad_norm": 0.04148663207888603,
"learning_rate": 0.00025324589096782657,
"loss": 0.5246,
"step": 671
},
{
"epoch": 0.2917933130699088,
"grad_norm": 0.03910430520772934,
"learning_rate": 0.0002518530937178751,
"loss": 0.4846,
"step": 672
},
{
"epoch": 0.29222752930959617,
"grad_norm": 0.036824680864810944,
"learning_rate": 0.000250462846561323,
"loss": 0.4654,
"step": 673
},
{
"epoch": 0.29266174554928354,
"grad_norm": 0.036250337958335876,
"learning_rate": 0.00024907516378511136,
"loss": 0.5106,
"step": 674
},
{
"epoch": 0.2930959617889709,
"grad_norm": 0.03372564911842346,
"learning_rate": 0.00024769005964982713,
"loss": 0.5222,
"step": 675
},
{
"epoch": 0.2935301780286583,
"grad_norm": 0.03601228445768356,
"learning_rate": 0.000246307548389559,
"loss": 0.5099,
"step": 676
},
{
"epoch": 0.29396439426834564,
"grad_norm": 0.039937473833560944,
"learning_rate": 0.0002449276442117486,
"loss": 0.5262,
"step": 677
},
{
"epoch": 0.294398610508033,
"grad_norm": 0.03719143569469452,
"learning_rate": 0.000243550361297047,
"loss": 0.5089,
"step": 678
},
{
"epoch": 0.2948328267477204,
"grad_norm": 0.034734416753053665,
"learning_rate": 0.00024217571379916668,
"loss": 0.4962,
"step": 679
},
{
"epoch": 0.29526704298740775,
"grad_norm": 0.03447417914867401,
"learning_rate": 0.00024080371584473748,
"loss": 0.4952,
"step": 680
},
{
"epoch": 0.2957012592270951,
"grad_norm": 0.03631613776087761,
"learning_rate": 0.00023943438153316155,
"loss": 0.497,
"step": 681
},
{
"epoch": 0.2961354754667825,
"grad_norm": 0.03350459039211273,
"learning_rate": 0.00023806772493646723,
"loss": 0.4775,
"step": 682
},
{
"epoch": 0.2965696917064698,
"grad_norm": 0.032677456736564636,
"learning_rate": 0.00023670376009916595,
"loss": 0.5001,
"step": 683
},
{
"epoch": 0.29700390794615716,
"grad_norm": 0.03544562682509422,
"learning_rate": 0.00023534250103810628,
"loss": 0.4796,
"step": 684
},
{
"epoch": 0.29743812418584453,
"grad_norm": 0.03991761803627014,
"learning_rate": 0.00023398396174233177,
"loss": 0.5307,
"step": 685
},
{
"epoch": 0.2978723404255319,
"grad_norm": 0.037654612213373184,
"learning_rate": 0.00023262815617293515,
"loss": 0.4727,
"step": 686
},
{
"epoch": 0.29830655666521927,
"grad_norm": 0.03447496145963669,
"learning_rate": 0.00023127509826291698,
"loss": 0.5037,
"step": 687
},
{
"epoch": 0.29874077290490664,
"grad_norm": 0.03626209869980812,
"learning_rate": 0.00022992480191704002,
"loss": 0.4758,
"step": 688
},
{
"epoch": 0.299174989144594,
"grad_norm": 0.03519487380981445,
"learning_rate": 0.000228577281011689,
"loss": 0.5046,
"step": 689
},
{
"epoch": 0.2996092053842814,
"grad_norm": 0.036514561623334885,
"learning_rate": 0.0002272325493947257,
"loss": 0.5014,
"step": 690
},
{
"epoch": 0.30004342162396874,
"grad_norm": 0.036256395280361176,
"learning_rate": 0.00022589062088534834,
"loss": 0.5311,
"step": 691
},
{
"epoch": 0.3004776378636561,
"grad_norm": 0.036770399659872055,
"learning_rate": 0.0002245515092739488,
"loss": 0.4922,
"step": 692
},
{
"epoch": 0.3009118541033435,
"grad_norm": 0.037295542657375336,
"learning_rate": 0.00022321522832197032,
"loss": 0.4817,
"step": 693
},
{
"epoch": 0.30134607034303085,
"grad_norm": 0.03913348540663719,
"learning_rate": 0.00022188179176176764,
"loss": 0.4565,
"step": 694
},
{
"epoch": 0.3017802865827182,
"grad_norm": 0.04223249852657318,
"learning_rate": 0.00022055121329646416,
"loss": 0.5297,
"step": 695
},
{
"epoch": 0.3022145028224056,
"grad_norm": 0.03583509102463722,
"learning_rate": 0.0002192235065998126,
"loss": 0.4808,
"step": 696
},
{
"epoch": 0.3026487190620929,
"grad_norm": 0.04437342286109924,
"learning_rate": 0.0002178986853160535,
"loss": 0.4794,
"step": 697
},
{
"epoch": 0.30308293530178027,
"grad_norm": 0.043945055454969406,
"learning_rate": 0.0002165767630597752,
"loss": 0.4959,
"step": 698
},
{
"epoch": 0.30351715154146763,
"grad_norm": 0.04786218702793121,
"learning_rate": 0.00021525775341577403,
"loss": 0.5047,
"step": 699
},
{
"epoch": 0.303951367781155,
"grad_norm": 0.041139792650938034,
"learning_rate": 0.0002139416699389153,
"loss": 0.5134,
"step": 700
},
{
"epoch": 0.30438558402084237,
"grad_norm": 0.034917134791612625,
"learning_rate": 0.00021262852615399258,
"loss": 0.4924,
"step": 701
},
{
"epoch": 0.30481980026052974,
"grad_norm": 0.03301383554935455,
"learning_rate": 0.0002113183355555904,
"loss": 0.4862,
"step": 702
},
{
"epoch": 0.3052540165002171,
"grad_norm": 0.0419352687895298,
"learning_rate": 0.00021001111160794383,
"loss": 0.5147,
"step": 703
},
{
"epoch": 0.3056882327399045,
"grad_norm": 0.03364208713173866,
"learning_rate": 0.00020870686774480197,
"loss": 0.5236,
"step": 704
},
{
"epoch": 0.30612244897959184,
"grad_norm": 0.03912827745079994,
"learning_rate": 0.0002074056173692881,
"loss": 0.5042,
"step": 705
},
{
"epoch": 0.3065566652192792,
"grad_norm": 0.03657132387161255,
"learning_rate": 0.00020610737385376348,
"loss": 0.5011,
"step": 706
},
{
"epoch": 0.3069908814589666,
"grad_norm": 0.03518640622496605,
"learning_rate": 0.00020481215053968872,
"loss": 0.4798,
"step": 707
},
{
"epoch": 0.30742509769865395,
"grad_norm": 0.03507522493600845,
"learning_rate": 0.00020351996073748714,
"loss": 0.5313,
"step": 708
},
{
"epoch": 0.3078593139383413,
"grad_norm": 0.03609062731266022,
"learning_rate": 0.00020223081772640866,
"loss": 0.4785,
"step": 709
},
{
"epoch": 0.3082935301780287,
"grad_norm": 0.03479000926017761,
"learning_rate": 0.000200944734754392,
"loss": 0.4835,
"step": 710
},
{
"epoch": 0.308727746417716,
"grad_norm": 0.044929634779691696,
"learning_rate": 0.00019966172503792985,
"loss": 0.5122,
"step": 711
},
{
"epoch": 0.30916196265740337,
"grad_norm": 0.03376319631934166,
"learning_rate": 0.00019838180176193177,
"loss": 0.4876,
"step": 712
},
{
"epoch": 0.30959617889709073,
"grad_norm": 0.03246736153960228,
"learning_rate": 0.0001971049780795901,
"loss": 0.4804,
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.3100303951367781, |
|
"grad_norm": 0.031609781086444855, |
|
"learning_rate": 0.00019583126711224342, |
|
"loss": 0.545, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.31046461137646547, |
|
"grad_norm": 0.034195881336927414, |
|
"learning_rate": 0.00019456068194924288, |
|
"loss": 0.5096, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.31089882761615284, |
|
"grad_norm": 0.03344978764653206, |
|
"learning_rate": 0.0001932932356478168, |
|
"loss": 0.5079, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.3113330438558402, |
|
"grad_norm": 0.03219503536820412, |
|
"learning_rate": 0.00019202894123293674, |
|
"loss": 0.5339, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.3117672600955276, |
|
"grad_norm": 0.03737034648656845, |
|
"learning_rate": 0.00019076781169718426, |
|
"loss": 0.5131, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.31220147633521494, |
|
"grad_norm": 0.03394628316164017, |
|
"learning_rate": 0.00018950986000061638, |
|
"loss": 0.4969, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.3126356925749023, |
|
"grad_norm": 0.032426606863737106, |
|
"learning_rate": 0.00018825509907063325, |
|
"loss": 0.49, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3130699088145897, |
|
"grad_norm": 0.03836345672607422, |
|
"learning_rate": 0.00018700354180184464, |
|
"loss": 0.4925, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.31350412505427705, |
|
"grad_norm": 0.035867128521203995, |
|
"learning_rate": 0.0001857552010559382, |
|
"loss": 0.5466, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.3139383412939644, |
|
"grad_norm": 0.03811186924576759, |
|
"learning_rate": 0.0001845100896615462, |
|
"loss": 0.4825, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.31437255753365173, |
|
"grad_norm": 0.032081685960292816, |
|
"learning_rate": 0.00018326822041411523, |
|
"loss": 0.4858, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.3148067737733391, |
|
"grad_norm": 0.03794045001268387, |
|
"learning_rate": 0.00018202960607577247, |
|
"loss": 0.5023, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.31524099001302647, |
|
"grad_norm": 0.033024583011865616, |
|
"learning_rate": 0.00018079425937519728, |
|
"loss": 0.4744, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.31567520625271384, |
|
"grad_norm": 0.03800756111741066, |
|
"learning_rate": 0.00017956219300748795, |
|
"loss": 0.4813, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.3161094224924012, |
|
"grad_norm": 0.03822485730051994, |
|
"learning_rate": 0.0001783334196340331, |
|
"loss": 0.4984, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.3165436387320886, |
|
"grad_norm": 0.03531699255108833, |
|
"learning_rate": 0.00017710795188237988, |
|
"loss": 0.5095, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.31697785497177594, |
|
"grad_norm": 0.03231954947113991, |
|
"learning_rate": 0.0001758858023461059, |
|
"loss": 0.4795, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.3174120712114633, |
|
"grad_norm": 0.04151546210050583, |
|
"learning_rate": 0.00017466698358468825, |
|
"loss": 0.4852, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.3178462874511507, |
|
"grad_norm": 0.03277713060379028, |
|
"learning_rate": 0.00017345150812337563, |
|
"loss": 0.4736, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.31828050369083805, |
|
"grad_norm": 0.03749159723520279, |
|
"learning_rate": 0.0001722393884530593, |
|
"loss": 0.5042, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.3187147199305254, |
|
"grad_norm": 0.029695043340325356, |
|
"learning_rate": 0.00017103063703014372, |
|
"loss": 0.511, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.3191489361702128, |
|
"grad_norm": 0.033079251646995544, |
|
"learning_rate": 0.00016982526627642042, |
|
"loss": 0.4871, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.31958315240990015, |
|
"grad_norm": 0.03277754411101341, |
|
"learning_rate": 0.00016862328857893854, |
|
"loss": 0.4805, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.3200173686495875, |
|
"grad_norm": 0.031276337802410126, |
|
"learning_rate": 0.00016742471628987892, |
|
"loss": 0.5195, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.32045158488927483, |
|
"grad_norm": 0.031646616756916046, |
|
"learning_rate": 0.000166229561726426, |
|
"loss": 0.4899, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.3208858011289622, |
|
"grad_norm": 0.03364792838692665, |
|
"learning_rate": 0.00016503783717064247, |
|
"loss": 0.5011, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.32132001736864957, |
|
"grad_norm": 0.032123491168022156, |
|
"learning_rate": 0.00016384955486934156, |
|
"loss": 0.5083, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.32175423360833694, |
|
"grad_norm": 0.03388531878590584, |
|
"learning_rate": 0.00016266472703396284, |
|
"loss": 0.5127, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.3221884498480243, |
|
"grad_norm": 0.03697797656059265, |
|
"learning_rate": 0.00016148336584044537, |
|
"loss": 0.5167, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.3226226660877117, |
|
"grad_norm": 0.03302115574479103, |
|
"learning_rate": 0.000160305483429103, |
|
"loss": 0.483, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.32305688232739904, |
|
"grad_norm": 0.03180578723549843, |
|
"learning_rate": 0.0001591310919045003, |
|
"loss": 0.5144, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.3234910985670864, |
|
"grad_norm": 0.033885449171066284, |
|
"learning_rate": 0.00015796020333532697, |
|
"loss": 0.484, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.3239253148067738, |
|
"grad_norm": 0.03529027849435806, |
|
"learning_rate": 0.0001567928297542749, |
|
"loss": 0.4967, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.32435953104646115, |
|
"grad_norm": 0.030900994315743446, |
|
"learning_rate": 0.00015562898315791353, |
|
"loss": 0.5177, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.3247937472861485, |
|
"grad_norm": 0.03337928652763367, |
|
"learning_rate": 0.00015446867550656767, |
|
"loss": 0.4902, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.3252279635258359, |
|
"grad_norm": 0.028887873515486717, |
|
"learning_rate": 0.00015331191872419348, |
|
"loss": 0.4841, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.32566217976552325, |
|
"grad_norm": 0.03361840918660164, |
|
"learning_rate": 0.0001521587246982568, |
|
"loss": 0.5171, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.3260963960052106, |
|
"grad_norm": 0.030776534229516983, |
|
"learning_rate": 0.00015100910527961049, |
|
"loss": 0.4592, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.32653061224489793, |
|
"grad_norm": 0.028537072241306305, |
|
"learning_rate": 0.00014986307228237266, |
|
"loss": 0.4894, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.3269648284845853, |
|
"grad_norm": 0.02929234690964222, |
|
"learning_rate": 0.00014872063748380543, |
|
"loss": 0.4832, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.32739904472427267, |
|
"grad_norm": 0.02816353552043438, |
|
"learning_rate": 0.00014758181262419424, |
|
"loss": 0.4788, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.32783326096396004, |
|
"grad_norm": 0.03165418654680252, |
|
"learning_rate": 0.00014644660940672628, |
|
"loss": 0.5128, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.3282674772036474, |
|
"grad_norm": 0.0279870443046093, |
|
"learning_rate": 0.00014531503949737106, |
|
"loss": 0.4966, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.3287016934433348, |
|
"grad_norm": 0.029440978541970253, |
|
"learning_rate": 0.00014418711452476046, |
|
"loss": 0.4785, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.32913590968302214, |
|
"grad_norm": 0.032016150653362274, |
|
"learning_rate": 0.00014306284608006837, |
|
"loss": 0.4813, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.3295701259227095, |
|
"grad_norm": 0.029074901714920998, |
|
"learning_rate": 0.00014194224571689284, |
|
"loss": 0.5165, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.3300043421623969, |
|
"grad_norm": 0.030839256942272186, |
|
"learning_rate": 0.00014082532495113625, |
|
"loss": 0.4615, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.33043855840208425, |
|
"grad_norm": 0.03017420321702957, |
|
"learning_rate": 0.00013971209526088762, |
|
"loss": 0.473, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.3308727746417716, |
|
"grad_norm": 0.03192548081278801, |
|
"learning_rate": 0.00013860256808630427, |
|
"loss": 0.5315, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.331306990881459, |
|
"grad_norm": 0.029673421755433083, |
|
"learning_rate": 0.00013749675482949486, |
|
"loss": 0.4978, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.33174120712114635, |
|
"grad_norm": 0.035233914852142334, |
|
"learning_rate": 0.00013639466685440134, |
|
"loss": 0.5174, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.3321754233608337, |
|
"grad_norm": 0.03196287900209427, |
|
"learning_rate": 0.00013529631548668298, |
|
"loss": 0.5121, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.33260963960052103, |
|
"grad_norm": 0.028510358184576035, |
|
"learning_rate": 0.0001342017120135993, |
|
"loss": 0.4629, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.3330438558402084, |
|
"grad_norm": 0.032123226672410965, |
|
"learning_rate": 0.0001331108676838948, |
|
"loss": 0.5538, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.33347807207989577, |
|
"grad_norm": 0.0301981084048748, |
|
"learning_rate": 0.00013202379370768252, |
|
"loss": 0.5278, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.33391228831958314, |
|
"grad_norm": 0.029721124097704887, |
|
"learning_rate": 0.00013094050125632973, |
|
"loss": 0.5353, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.3343465045592705, |
|
"grad_norm": 0.029476812109351158, |
|
"learning_rate": 0.00012986100146234231, |
|
"loss": 0.488, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.3347807207989579, |
|
"grad_norm": 0.029208241030573845, |
|
"learning_rate": 0.00012878530541925076, |
|
"loss": 0.5384, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.33521493703864524, |
|
"grad_norm": 0.029901737347245216, |
|
"learning_rate": 0.00012771342418149657, |
|
"loss": 0.4675, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.3356491532783326, |
|
"grad_norm": 0.030133001506328583, |
|
"learning_rate": 0.00012664536876431755, |
|
"loss": 0.5175, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.33608336951802, |
|
"grad_norm": 0.028815865516662598, |
|
"learning_rate": 0.0001255811501436359, |
|
"loss": 0.5012, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.33651758575770735, |
|
"grad_norm": 0.02868812158703804, |
|
"learning_rate": 0.00012452077925594434, |
|
"loss": 0.521, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.3369518019973947, |
|
"grad_norm": 0.035325054079294205, |
|
"learning_rate": 0.00012346426699819457, |
|
"loss": 0.5075, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.3373860182370821, |
|
"grad_norm": 0.03588160499930382, |
|
"learning_rate": 0.00012241162422768444, |
|
"loss": 0.5091, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.33782023447676945, |
|
"grad_norm": 0.03347684070467949, |
|
"learning_rate": 0.00012136286176194744, |
|
"loss": 0.5039, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.3382544507164568, |
|
"grad_norm": 0.03358198329806328, |
|
"learning_rate": 0.00012031799037864011, |
|
"loss": 0.5014, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.33868866695614414, |
|
"grad_norm": 0.02983028069138527, |
|
"learning_rate": 0.0001192770208154328, |
|
"loss": 0.4823, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.3391228831958315, |
|
"grad_norm": 0.028506051748991013, |
|
"learning_rate": 0.00011823996376989849, |
|
"loss": 0.5135, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.33955709943551887, |
|
"grad_norm": 0.031358085572719574, |
|
"learning_rate": 0.00011720682989940262, |
|
"loss": 0.4677, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.33999131567520624, |
|
"grad_norm": 0.031006425619125366, |
|
"learning_rate": 0.00011617762982099444, |
|
"loss": 0.5188, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.3404255319148936, |
|
"grad_norm": 0.0309920571744442, |
|
"learning_rate": 0.00011515237411129698, |
|
"loss": 0.5087, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.340859748154581, |
|
"grad_norm": 0.027928480878472328, |
|
"learning_rate": 0.0001141310733063991, |
|
"loss": 0.5005, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.34129396439426835, |
|
"grad_norm": 0.028440937399864197, |
|
"learning_rate": 0.00011311373790174655, |
|
"loss": 0.5055, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.3417281806339557, |
|
"grad_norm": 0.03135620057582855, |
|
"learning_rate": 0.00011210037835203507, |
|
"loss": 0.4947, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.3421623968736431, |
|
"grad_norm": 0.029479345306754112, |
|
"learning_rate": 0.00011109100507110132, |
|
"loss": 0.5048, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.34259661311333045, |
|
"grad_norm": 0.030894558876752853, |
|
"learning_rate": 0.00011008562843181796, |
|
"loss": 0.4726, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.3430308293530178, |
|
"grad_norm": 0.03545878455042839, |
|
"learning_rate": 0.0001090842587659851, |
|
"loss": 0.5165, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.3434650455927052, |
|
"grad_norm": 0.028414282947778702, |
|
"learning_rate": 0.00010808690636422585, |
|
"loss": 0.5008, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.34389926183239256, |
|
"grad_norm": 0.029046185314655304, |
|
"learning_rate": 0.00010709358147587883, |
|
"loss": 0.5067, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.34433347807207987, |
|
"grad_norm": 0.029067449271678925, |
|
"learning_rate": 0.00010610429430889452, |
|
"loss": 0.5039, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.34476769431176724, |
|
"grad_norm": 0.03735330328345299, |
|
"learning_rate": 0.00010511905502972886, |
|
"loss": 0.4874, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.3452019105514546, |
|
"grad_norm": 0.025629056617617607, |
|
"learning_rate": 0.00010413787376324018, |
|
"loss": 0.4743, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.345636126791142, |
|
"grad_norm": 0.03742838650941849, |
|
"learning_rate": 0.00010316076059258389, |
|
"loss": 0.4706, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.34607034303082934, |
|
"grad_norm": 0.028506414964795113, |
|
"learning_rate": 0.00010218772555910954, |
|
"loss": 0.4677, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.3465045592705167, |
|
"grad_norm": 0.028791414573788643, |
|
"learning_rate": 0.00010121877866225782, |
|
"loss": 0.4786, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.3469387755102041, |
|
"grad_norm": 0.027565713971853256, |
|
"learning_rate": 0.00010025392985945703, |
|
"loss": 0.4756, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.34737299174989145, |
|
"grad_norm": 0.037437453866004944, |
|
"learning_rate": 9.929318906602175e-05, |
|
"loss": 0.4941, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.3478072079895788, |
|
"grad_norm": 0.028768004849553108, |
|
"learning_rate": 9.833656615504976e-05, |
|
"loss": 0.5263, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.3482414242292662, |
|
"grad_norm": 0.030494874343276024, |
|
"learning_rate": 9.738407095732193e-05, |
|
"loss": 0.4933, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.34867564046895355, |
|
"grad_norm": 0.02905452810227871, |
|
"learning_rate": 9.643571326119983e-05, |
|
"loss": 0.4751, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.3491098567086409, |
|
"grad_norm": 0.02821405418217182, |
|
"learning_rate": 9.549150281252633e-05, |
|
"loss": 0.4874, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.3495440729483283, |
|
"grad_norm": 0.02889527939260006, |
|
"learning_rate": 9.455144931452459e-05, |
|
"loss": 0.4732, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.34997828918801566, |
|
"grad_norm": 0.02988606132566929, |
|
"learning_rate": 9.36155624276987e-05, |
|
"loss": 0.5044, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.35041250542770297, |
|
"grad_norm": 0.03306758031249046, |
|
"learning_rate": 9.26838517697346e-05, |
|
"loss": 0.492, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.35084672166739034, |
|
"grad_norm": 0.028606578707695007, |
|
"learning_rate": 9.175632691540064e-05, |
|
"loss": 0.4696, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.3512809379070777, |
|
"grad_norm": 0.027804942801594734, |
|
"learning_rate": 9.083299739645007e-05, |
|
"loss": 0.504, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.3517151541467651, |
|
"grad_norm": 0.02756122685968876, |
|
"learning_rate": 8.991387270152201e-05, |
|
"loss": 0.5129, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.35214937038645244, |
|
"grad_norm": 0.03051147237420082, |
|
"learning_rate": 8.899896227604509e-05, |
|
"loss": 0.5054, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.3525835866261398, |
|
"grad_norm": 0.032840099185705185, |
|
"learning_rate": 8.808827552213916e-05, |
|
"loss": 0.4926, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.3530178028658272, |
|
"grad_norm": 0.026586757972836494, |
|
"learning_rate": 8.718182179851997e-05, |
|
"loss": 0.4992, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.35345201910551455, |
|
"grad_norm": 0.030486930161714554, |
|
"learning_rate": 8.627961042040184e-05, |
|
"loss": 0.5221, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.3538862353452019, |
|
"grad_norm": 0.03037872351706028, |
|
"learning_rate": 8.538165065940262e-05, |
|
"loss": 0.4801, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.3543204515848893, |
|
"grad_norm": 0.027922067791223526, |
|
"learning_rate": 8.448795174344803e-05, |
|
"loss": 0.5011, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.35475466782457665, |
|
"grad_norm": 0.030396727845072746, |
|
"learning_rate": 8.359852285667752e-05, |
|
"loss": 0.4937, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.355188884064264, |
|
"grad_norm": 0.028359906747937202, |
|
"learning_rate": 8.271337313934868e-05, |
|
"loss": 0.4905, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.3556231003039514, |
|
"grad_norm": 0.041622817516326904, |
|
"learning_rate": 8.183251168774475e-05, |
|
"loss": 0.4829, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.35605731654363876, |
|
"grad_norm": 0.030735066160559654, |
|
"learning_rate": 8.09559475540797e-05, |
|
"loss": 0.5038, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.35649153278332607, |
|
"grad_norm": 0.030377600342035294, |
|
"learning_rate": 8.008368974640634e-05, |
|
"loss": 0.523, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.35692574902301344, |
|
"grad_norm": 0.027815300971269608, |
|
"learning_rate": 7.921574722852342e-05, |
|
"loss": 0.4758, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.3573599652627008, |
|
"grad_norm": 0.026980141177773476, |
|
"learning_rate": 7.835212891988292e-05, |
|
"loss": 0.5151, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.3577941815023882, |
|
"grad_norm": 0.028305258601903915, |
|
"learning_rate": 7.749284369549953e-05, |
|
"loss": 0.4968, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.35822839774207554, |
|
"grad_norm": 0.028862129896879196, |
|
"learning_rate": 7.663790038585794e-05, |
|
"loss": 0.5093, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.3586626139817629, |
|
"grad_norm": 0.031168105080723763, |
|
"learning_rate": 7.578730777682385e-05, |
|
"loss": 0.507, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.3590968302214503, |
|
"grad_norm": 0.02467159368097782, |
|
"learning_rate": 7.494107460955207e-05, |
|
"loss": 0.5031, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.35953104646113765, |
|
"grad_norm": 0.028088459745049477, |
|
"learning_rate": 7.409920958039795e-05, |
|
"loss": 0.5031, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.359965262700825, |
|
"grad_norm": 0.027428364381194115, |
|
"learning_rate": 7.326172134082704e-05, |
|
"loss": 0.5262, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.3603994789405124, |
|
"grad_norm": 0.033019062131643295, |
|
"learning_rate": 7.242861849732695e-05, |
|
"loss": 0.4722, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.36083369518019975, |
|
"grad_norm": 0.028655540198087692, |
|
"learning_rate": 7.159990961131818e-05, |
|
"loss": 0.4899, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.3612679114198871, |
|
"grad_norm": 0.02457267791032791, |
|
"learning_rate": 7.077560319906695e-05, |
|
"loss": 0.4989, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.3617021276595745, |
|
"grad_norm": 0.027305081486701965, |
|
"learning_rate": 6.995570773159692e-05, |
|
"loss": 0.4885, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.36213634389926186, |
|
"grad_norm": 0.026189139112830162, |
|
"learning_rate": 6.914023163460248e-05, |
|
"loss": 0.4727, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.36257056013894917, |
|
"grad_norm": 0.028178861364722252, |
|
"learning_rate": 6.832918328836246e-05, |
|
"loss": 0.5538, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.36300477637863654, |
|
"grad_norm": 0.02753199078142643, |
|
"learning_rate": 6.752257102765324e-05, |
|
"loss": 0.4991, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.3634389926183239, |
|
"grad_norm": 0.02777719311416149, |
|
"learning_rate": 6.6720403141664e-05, |
|
"loss": 0.4607, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.3638732088580113, |
|
"grad_norm": 0.026312079280614853, |
|
"learning_rate": 6.592268787391076e-05, |
|
"loss": 0.5031, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.36430742509769865, |
|
"grad_norm": 0.028296776115894318, |
|
"learning_rate": 6.512943342215233e-05, |
|
"loss": 0.4825, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.364741641337386, |
|
"grad_norm": 0.026355035603046417, |
|
"learning_rate": 6.43406479383053e-05, |
|
"loss": 0.4871, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3651758575770734, |
|
"grad_norm": 0.025023045018315315, |
|
"learning_rate": 6.355633952836115e-05, |
|
"loss": 0.4797, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.36561007381676075, |
|
"grad_norm": 0.027957437559962273, |
|
"learning_rate": 6.277651625230219e-05, |
|
"loss": 0.5078, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.3660442900564481, |
|
"grad_norm": 0.028170911595225334, |
|
"learning_rate": 6.200118612401917e-05, |
|
"loss": 0.4781, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.3664785062961355, |
|
"grad_norm": 0.032493725419044495, |
|
"learning_rate": 6.123035711122859e-05, |
|
"loss": 0.5078, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.36691272253582286, |
|
"grad_norm": 0.029985696077346802, |
|
"learning_rate": 6.046403713539139e-05, |
|
"loss": 0.478, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.3673469387755102, |
|
"grad_norm": 0.03212130442261696, |
|
"learning_rate": 5.9702234071631e-05, |
|
"loss": 0.5146, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.3677811550151976, |
|
"grad_norm": 0.024975216016173363, |
|
"learning_rate": 5.89449557486525e-05, |
|
"loss": 0.4564, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.36821537125488496, |
|
"grad_norm": 0.026881275698542595, |
|
"learning_rate": 5.8192209948662365e-05, |
|
"loss": 0.518, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.3686495874945723, |
|
"grad_norm": 0.04243510961532593, |
|
"learning_rate": 5.7444004407288256e-05, |
|
"loss": 0.5238, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.36908380373425964, |
|
"grad_norm": 0.02634822018444538, |
|
"learning_rate": 5.6700346813499945e-05, |
|
"loss": 0.5126, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.369518019973947, |
|
"grad_norm": 0.029794232919812202, |
|
"learning_rate": 5.5961244809529746e-05, |
|
"loss": 0.5017, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.3699522362136344, |
|
"grad_norm": 0.028596797958016396, |
|
"learning_rate": 5.5226705990794155e-05, |
|
"loss": 0.508, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.37038645245332175, |
|
"grad_norm": 0.04081004112958908, |
|
"learning_rate": 5.449673790581611e-05, |
|
"loss": 0.4946, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.3708206686930091, |
|
"grad_norm": 0.02494877576828003, |
|
"learning_rate": 5.377134805614714e-05, |
|
"loss": 0.5008, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.3712548849326965, |
|
"grad_norm": 0.029261523857712746, |
|
"learning_rate": 5.305054389629022e-05, |
|
"loss": 0.5155, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.37168910117238385, |
|
"grad_norm": 0.026129230856895447, |
|
"learning_rate": 5.2334332833623486e-05, |
|
"loss": 0.4979, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.3721233174120712, |
|
"grad_norm": 0.03483575955033302, |
|
"learning_rate": 5.162272222832348e-05, |
|
"loss": 0.4956, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.3725575336517586, |
|
"grad_norm": 0.028554193675518036, |
|
"learning_rate": 5.0915719393290485e-05, |
|
"loss": 0.4816, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.37299174989144596, |
|
"grad_norm": 0.028302082791924477, |
|
"learning_rate": 5.021333159407232e-05, |
|
"loss": 0.4734, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.3734259661311333, |
|
"grad_norm": 0.03633783757686615, |
|
"learning_rate": 4.9515566048790485e-05, |
|
"loss": 0.5152, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.3738601823708207, |
|
"grad_norm": 0.025067532435059547, |
|
"learning_rate": 4.882242992806546e-05, |
|
"loss": 0.505, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.374294398610508, |
|
"grad_norm": 0.03426215052604675, |
|
"learning_rate": 4.813393035494329e-05, |
|
"loss": 0.4811, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.3747286148501954, |
|
"grad_norm": 0.024589484557509422, |
|
"learning_rate": 4.745007440482252e-05, |
|
"loss": 0.4881, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.37516283108988274, |
|
"grad_norm": 0.032448723912239075, |
|
"learning_rate": 4.677086910538092e-05, |
|
"loss": 0.5057, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.3755970473295701, |
|
"grad_norm": 0.029285568743944168, |
|
"learning_rate": 4.6096321436503994e-05, |
|
"loss": 0.5035, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.3760312635692575, |
|
"grad_norm": 0.02594076097011566, |
|
"learning_rate": 4.5426438330212535e-05, |
|
"loss": 0.4789, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.37646547980894485, |
|
"grad_norm": 0.028019646182656288, |
|
"learning_rate": 4.476122667059207e-05, |
|
"loss": 0.4841, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.3768996960486322, |
|
"grad_norm": 0.022888783365488052, |
|
"learning_rate": 4.410069329372152e-05, |
|
"loss": 0.4609, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.3773339122883196, |
|
"grad_norm": 0.025522053241729736, |
|
"learning_rate": 4.3444844987603426e-05, |
|
"loss": 0.5148, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.37776812852800695, |
|
"grad_norm": 0.027691571041941643, |
|
"learning_rate": 4.279368849209381e-05, |
|
"loss": 0.4689, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3782023447676943, |
|
"grad_norm": 0.02531832829117775, |
|
"learning_rate": 4.214723049883307e-05, |
|
"loss": 0.5024, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.3786365610073817, |
|
"grad_norm": 0.025689370930194855, |
|
"learning_rate": 4.150547765117746e-05, |
|
"loss": 0.4839, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.37907077724706906, |
|
"grad_norm": 0.031849976629018784, |
|
"learning_rate": 4.086843654413031e-05, |
|
"loss": 0.4888, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.3795049934867564, |
|
"grad_norm": 0.024920670315623283, |
|
"learning_rate": 4.0236113724274713e-05, |
|
"loss": 0.5088, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.3799392097264438, |
|
"grad_norm": 0.03232395276427269, |
|
"learning_rate": 3.960851568970586e-05, |
|
"loss": 0.5039, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.3803734259661311, |
|
"grad_norm": 0.025085026398301125, |
|
"learning_rate": 3.898564888996475e-05, |
|
"loss": 0.4753, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.3808076422058185, |
|
"grad_norm": 0.03306965157389641, |
|
"learning_rate": 3.83675197259713e-05, |
|
"loss": 0.493, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.38124185844550584, |
|
"grad_norm": 0.02512296289205551, |
|
"learning_rate": 3.7754134549959293e-05, |
|
"loss": 0.5048, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.3816760746851932, |
|
"grad_norm": 0.02608814276754856, |
|
"learning_rate": 3.714549966541014e-05, |
|
"loss": 0.4893, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.3821102909248806, |
|
"grad_norm": 0.02658063732087612, |
|
"learning_rate": 3.654162132698918e-05, |
|
"loss": 0.5069, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.38254450716456795, |
|
"grad_norm": 0.027712546288967133, |
|
"learning_rate": 3.594250574048058e-05, |
|
"loss": 0.4778, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.3829787234042553, |
|
"grad_norm": 0.02457358129322529, |
|
"learning_rate": 3.534815906272404e-05, |
|
"loss": 0.4946, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.3834129396439427, |
|
"grad_norm": 0.032855454832315445, |
|
"learning_rate": 3.475858740155108e-05, |
|
"loss": 0.4796, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.38384715588363005, |
|
"grad_norm": 0.02618669532239437, |
|
"learning_rate": 3.417379681572297e-05, |
|
"loss": 0.4944, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.3842813721233174, |
|
"grad_norm": 0.02916988916695118, |
|
"learning_rate": 3.3593793314867616e-05, |
|
"loss": 0.4865, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.3847155883630048, |
|
"grad_norm": 0.02979261800646782, |
|
"learning_rate": 3.3018582859418446e-05, |
|
"loss": 0.5272, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.38514980460269216, |
|
"grad_norm": 0.024905268102884293, |
|
"learning_rate": 3.2448171360552835e-05, |
|
"loss": 0.4878, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.3855840208423795, |
|
"grad_norm": 0.025121575221419334, |
|
"learning_rate": 3.18825646801314e-05, |
|
"loss": 0.4887, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.3860182370820669, |
|
"grad_norm": 0.02593301795423031, |
|
"learning_rate": 3.132176863063807e-05, |
|
"loss": 0.5255, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.3864524533217542, |
|
"grad_norm": 0.02763954922556877, |
|
"learning_rate": 3.076578897511978e-05, |
|
"loss": 0.4672, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.3868866695614416, |
|
"grad_norm": 0.026684967800974846, |
|
"learning_rate": 3.0214631427127882e-05, |
|
"loss": 0.5277, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.38732088580112894, |
|
"grad_norm": 0.030234767124056816, |
|
"learning_rate": 2.9668301650658758e-05, |
|
"loss": 0.502, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.3877551020408163, |
|
"grad_norm": 0.023924171924591064, |
|
"learning_rate": 2.9126805260096256e-05, |
|
"loss": 0.4813, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.3881893182805037, |
|
"grad_norm": 0.03323635831475258, |
|
"learning_rate": 2.859014782015351e-05, |
|
"loss": 0.5422, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.38862353452019105, |
|
"grad_norm": 0.02363520860671997, |
|
"learning_rate": 2.8058334845816213e-05, |
|
"loss": 0.5091, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.3890577507598784, |
|
"grad_norm": 0.024630311876535416, |
|
"learning_rate": 2.7531371802285433e-05, |
|
"loss": 0.4642, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.3894919669995658, |
|
"grad_norm": 0.025726784020662308, |
|
"learning_rate": 2.7009264104921606e-05, |
|
"loss": 0.511, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.38992618323925315, |
|
"grad_norm": 0.02972118742763996, |
|
"learning_rate": 2.6492017119189414e-05, |
|
"loss": 0.4825, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.3903603994789405, |
|
"grad_norm": 0.024715179577469826, |
|
"learning_rate": 2.597963616060167e-05, |
|
"loss": 0.4879, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.3907946157186279, |
|
"grad_norm": 0.02915225736796856, |
|
"learning_rate": 2.547212649466568e-05, |
|
"loss": 0.4903, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.39122883195831526, |
|
"grad_norm": 0.02453678846359253, |
|
"learning_rate": 2.4969493336828353e-05, |
|
"loss": 0.5024, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.39166304819800263, |
|
"grad_norm": 0.025468653067946434, |
|
"learning_rate": 2.4471741852423235e-05, |
|
"loss": 0.4964, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.39209726443769, |
|
"grad_norm": 0.024076983332633972, |
|
"learning_rate": 2.3978877156616784e-05, |
|
"loss": 0.4872, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.3925314806773773, |
|
"grad_norm": 0.02444782853126526, |
|
"learning_rate": 2.349090431435641e-05, |
|
"loss": 0.482, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.3929656969170647, |
|
"grad_norm": 0.025424372404813766, |
|
"learning_rate": 2.3007828340318116e-05, |
|
"loss": 0.4907, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.39339991315675205, |
|
"grad_norm": 0.023807033896446228, |
|
"learning_rate": 2.252965419885483e-05, |
|
"loss": 0.5007, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.3938341293964394, |
|
"grad_norm": 0.022182505577802658, |
|
"learning_rate": 2.2056386803945726e-05, |
|
"loss": 0.4712, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.3942683456361268, |
|
"grad_norm": 0.02266152761876583, |
|
"learning_rate": 2.1588031019145637e-05, |
|
"loss": 0.479, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.39470256187581415, |
|
"grad_norm": 0.024663135409355164, |
|
"learning_rate": 2.1124591657534777e-05, |
|
"loss": 0.514, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.3951367781155015, |
|
"grad_norm": 0.023098768666386604, |
|
"learning_rate": 2.0666073481669712e-05, |
|
"loss": 0.4857, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.3955709943551889, |
|
"grad_norm": 0.025155888870358467, |
|
"learning_rate": 2.0212481203534084e-05, |
|
"loss": 0.5054, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.39600521059487626, |
|
"grad_norm": 0.030762221664190292, |
|
"learning_rate": 1.9763819484490355e-05, |
|
"loss": 0.4967, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.3964394268345636, |
|
"grad_norm": 0.029380332678556442, |
|
"learning_rate": 1.932009293523196e-05, |
|
"loss": 0.5412, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.396873643074251, |
|
"grad_norm": 0.023896733298897743, |
|
"learning_rate": 1.888130611573563e-05, |
|
"loss": 0.5002, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.39730785931393836, |
|
"grad_norm": 0.024988802149891853, |
|
"learning_rate": 1.844746353521487e-05, |
|
"loss": 0.4888, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.39774207555362573, |
|
"grad_norm": 0.02422316186130047, |
|
"learning_rate": 1.801856965207338e-05, |
|
"loss": 0.4818, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.3981762917933131, |
|
"grad_norm": 0.031409528106451035, |
|
"learning_rate": 1.7594628873859486e-05, |
|
"loss": 0.4923, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.3986105080330004, |
|
"grad_norm": 0.023978758603334427, |
|
"learning_rate": 1.7175645557220565e-05, |
|
"loss": 0.4798, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.3990447242726878, |
|
"grad_norm": 0.02715397998690605, |
|
"learning_rate": 1.6761624007858522e-05, |
|
"loss": 0.4474, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.39947894051237515, |
|
"grad_norm": 0.02678792178630829, |
|
"learning_rate": 1.6352568480485276e-05, |
|
"loss": 0.514, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.3999131567520625, |
|
"grad_norm": 0.02234644442796707, |
|
"learning_rate": 1.594848317877934e-05, |
|
"loss": 0.4869, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.4003473729917499, |
|
"grad_norm": 0.029807301238179207, |
|
"learning_rate": 1.5549372255342365e-05, |
|
"loss": 0.5242, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.40078158923143725, |
|
"grad_norm": 0.03037095069885254, |
|
"learning_rate": 1.5155239811656562e-05, |
|
"loss": 0.5035, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.4012158054711246, |
|
"grad_norm": 0.022767795249819756, |
|
"learning_rate": 1.4766089898042678e-05, |
|
"loss": 0.4903, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.401650021710812, |
|
"grad_norm": 0.026207391172647476, |
|
"learning_rate": 1.4381926513618138e-05, |
|
"loss": 0.4676, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.40208423795049936, |
|
"grad_norm": 0.025653038173913956, |
|
"learning_rate": 1.400275360625608e-05, |
|
"loss": 0.4804, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.4025184541901867, |
|
"grad_norm": 0.024481656029820442, |
|
"learning_rate": 1.362857507254478e-05, |
|
"loss": 0.4871, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.4029526704298741, |
|
"grad_norm": 0.02475779317319393, |
|
"learning_rate": 1.3259394757747678e-05, |
|
"loss": 0.5199, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.40338688666956146, |
|
"grad_norm": 0.02371501363813877, |
|
"learning_rate": 1.289521645576358e-05, |
|
"loss": 0.528, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.40382110290924883, |
|
"grad_norm": 0.023029111325740814, |
|
"learning_rate": 1.2536043909088191e-05, |
|
"loss": 0.4801, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.40425531914893614, |
|
"grad_norm": 0.025304608047008514, |
|
"learning_rate": 1.2181880808775025e-05, |
|
"loss": 0.4793, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.4046895353886235, |
|
"grad_norm": 0.02734360471367836, |
|
"learning_rate": 1.183273079439795e-05, |
|
"loss": 0.5022, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.4051237516283109, |
|
"grad_norm": 0.024530354887247086, |
|
"learning_rate": 1.1488597454013538e-05, |
|
"loss": 0.5032, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.40555796786799825, |
|
"grad_norm": 0.02369566634297371, |
|
"learning_rate": 1.1149484324124327e-05, |
|
"loss": 0.4633, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.4059921841076856, |
|
"grad_norm": 0.024855339899659157, |
|
"learning_rate": 1.0815394889642338e-05, |
|
"loss": 0.5175, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.406426400347373, |
|
"grad_norm": 0.023386195302009583, |
|
"learning_rate": 1.0486332583853564e-05, |
|
"loss": 0.5014, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.40686061658706035, |
|
"grad_norm": 0.029485292732715607, |
|
"learning_rate": 1.0162300788382261e-05, |
|
"loss": 0.4942, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.4072948328267477, |
|
"grad_norm": 0.023054329678416252, |
|
"learning_rate": 9.843302833156376e-06, |
|
"loss": 0.4683, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.4077290490664351, |
|
"grad_norm": 0.031031260266900063, |
|
"learning_rate": 9.529341996373674e-06, |
|
"loss": 0.5173, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.40816326530612246, |
|
"grad_norm": 0.023358464241027832, |
|
"learning_rate": 9.22042150446728e-06, |
|
"loss": 0.4722, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.4085974815458098, |
|
"grad_norm": 0.025074800476431847, |
|
"learning_rate": 8.916544532073411e-06, |
|
"loss": 0.4784, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.4090316977854972, |
|
"grad_norm": 0.02519945055246353, |
|
"learning_rate": 8.617714201998083e-06, |
|
"loss": 0.5022, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.40946591402518456, |
|
"grad_norm": 0.027328645810484886, |
|
"learning_rate": 8.323933585185184e-06, |
|
"loss": 0.4896, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.40990013026487193, |
|
"grad_norm": 0.02542622946202755, |
|
"learning_rate": 8.035205700685167e-06, |
|
"loss": 0.4676, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.41033434650455924, |
|
"grad_norm": 0.027195794507861137, |
|
"learning_rate": 7.751533515623799e-06, |
|
"loss": 0.4903, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.4107685627442466, |
|
"grad_norm": 0.02717754803597927, |
|
"learning_rate": 7.47291994517163e-06, |
|
"loss": 0.4812, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.411202778983934, |
|
"grad_norm": 0.02495565079152584, |
|
"learning_rate": 7.199367852514238e-06, |
|
"loss": 0.5014, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.41163699522362135, |
|
"grad_norm": 0.024513866752386093, |
|
"learning_rate": 6.93088004882253e-06, |
|
"loss": 0.5071, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.4120712114633087, |
|
"grad_norm": 0.03526155650615692, |
|
"learning_rate": 6.667459293224154e-06, |
|
"loss": 0.531, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.4125054277029961, |
|
"grad_norm": 0.023175369948148727, |
|
"learning_rate": 6.4091082927749125e-06, |
|
"loss": 0.4811, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.41293964394268345, |
|
"grad_norm": 0.026199523359537125, |
|
"learning_rate": 6.15582970243117e-06, |
|
"loss": 0.5087, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.4133738601823708, |
|
"grad_norm": 0.024454379454255104, |
|
"learning_rate": 5.907626125022158e-06, |
|
"loss": 0.479, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.4138080764220582, |
|
"grad_norm": 0.02492498978972435, |
|
"learning_rate": 5.664500111223769e-06, |
|
"loss": 0.4977, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.41424229266174556, |
|
"grad_norm": 0.023200612515211105, |
|
"learning_rate": 5.426454159531913e-06, |
|
"loss": 0.4803, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.4146765089014329, |
|
"grad_norm": 0.024389084428548813, |
|
"learning_rate": 5.193490716237037e-06, |
|
"loss": 0.4733, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.4151107251411203, |
|
"grad_norm": 0.027189958840608597, |
|
"learning_rate": 4.9656121753990924e-06, |
|
"loss": 0.4994, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.41554494138080766, |
|
"grad_norm": 0.02239627204835415, |
|
"learning_rate": 4.7428208788224955e-06, |
|
"loss": 0.4978, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.41597915762049503, |
|
"grad_norm": 0.021820807829499245, |
|
"learning_rate": 4.52511911603265e-06, |
|
"loss": 0.4871, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.41641337386018235, |
|
"grad_norm": 0.02775476686656475, |
|
"learning_rate": 4.312509124251906e-06, |
|
"loss": 0.4918, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.4168475900998697, |
|
"grad_norm": 0.023040220141410828, |
|
"learning_rate": 4.104993088376974e-06, |
|
"loss": 0.4497, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.4172818063395571, |
|
"grad_norm": 0.025357872247695923, |
|
"learning_rate": 3.9025731409561e-06, |
|
"loss": 0.4945, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.41771602257924445, |
|
"grad_norm": 0.02308405004441738, |
|
"learning_rate": 3.7052513621674833e-06, |
|
"loss": 0.4945, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.4181502388189318, |
|
"grad_norm": 0.02784230373799801, |
|
"learning_rate": 3.513029779797783e-06, |
|
"loss": 0.4543, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.4185844550586192, |
|
"grad_norm": 0.026198869571089745, |
|
"learning_rate": 3.325910369220975e-06, |
|
"loss": 0.4975, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.41901867129830656, |
|
"grad_norm": 0.023575875908136368, |
|
"learning_rate": 3.143895053378698e-06, |
|
"loss": 0.4987, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.4194528875379939, |
|
"grad_norm": 0.02861775830388069, |
|
"learning_rate": 2.966985702759828e-06, |
|
"loss": 0.4857, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.4198871037776813, |
|
"grad_norm": 0.033127009868621826, |
|
"learning_rate": 2.7951841353817676e-06, |
|
"loss": 0.465, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.42032132001736866, |
|
"grad_norm": 0.02451484464108944, |
|
"learning_rate": 2.6284921167712973e-06, |
|
"loss": 0.4863, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.42075553625705603, |
|
"grad_norm": 0.02445485256612301, |
|
"learning_rate": 2.466911359946977e-06, |
|
"loss": 0.4896, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.4211897524967434, |
|
"grad_norm": 0.02243492379784584, |
|
"learning_rate": 2.3104435254008848e-06, |
|
"loss": 0.4839, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.42162396873643077, |
|
"grad_norm": 0.026183119043707848, |
|
"learning_rate": 2.1590902210822937e-06, |
|
"loss": 0.4594, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.42205818497611813, |
|
"grad_norm": 0.025734947994351387, |
|
"learning_rate": 2.012853002380466e-06, |
|
"loss": 0.4684, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.42249240121580545, |
|
"grad_norm": 0.027873944491147995, |
|
"learning_rate": 1.8717333721091634e-06, |
|
"loss": 0.5056, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.4229266174554928, |
|
"grad_norm": 0.02484060265123844, |
|
"learning_rate": 1.735732780490884e-06, |
|
"loss": 0.5153, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.4233608336951802, |
|
"grad_norm": 0.022599438205361366, |
|
"learning_rate": 1.6048526251421502e-06, |
|
"loss": 0.4936, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.42379504993486755, |
|
"grad_norm": 0.020465485751628876, |
|
"learning_rate": 1.4790942510590765e-06, |
|
"loss": 0.4848, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.4242292661745549, |
|
"grad_norm": 0.027462849393486977, |
|
"learning_rate": 1.3584589506034362e-06, |
|
"loss": 0.4666, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.4246634824142423, |
|
"grad_norm": 0.02553846500813961, |
|
"learning_rate": 1.2429479634897267e-06, |
|
"loss": 0.4899, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.42509769865392966, |
|
"grad_norm": 0.024199191480875015, |
|
"learning_rate": 1.132562476771959e-06, |
|
"loss": 0.4792, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.425531914893617, |
|
"grad_norm": 0.023328760638833046, |
|
"learning_rate": 1.0273036248318324e-06, |
|
"loss": 0.4973, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.4259661311333044, |
|
"grad_norm": 0.02446364425122738, |
|
"learning_rate": 9.271724893669121e-07, |
|
"loss": 0.4654, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.42640034737299176, |
|
"grad_norm": 0.02676616981625557, |
|
"learning_rate": 8.321700993795811e-07, |
|
"loss": 0.4881, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.42683456361267913, |
|
"grad_norm": 0.02478119358420372, |
|
"learning_rate": 7.422974311662722e-07, |
|
"loss": 0.5055, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.4272687798523665, |
|
"grad_norm": 0.03248545154929161, |
|
"learning_rate": 6.575554083078084e-07, |
|
"loss": 0.4949, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.42770299609205387, |
|
"grad_norm": 0.024504756554961205, |
|
"learning_rate": 5.779449016595773e-07, |
|
"loss": 0.4986, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.42813721233174123, |
|
"grad_norm": 0.02531745657324791, |
|
"learning_rate": 5.034667293427053e-07, |
|
"loss": 0.4851, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.42857142857142855, |
|
"grad_norm": 0.024369893595576286, |
|
"learning_rate": 4.3412165673556436e-07, |
|
"loss": 0.5044, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.4290056448111159, |
|
"grad_norm": 0.02750435471534729, |
|
"learning_rate": 3.6991039646616656e-07, |
|
"loss": 0.468, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.4294398610508033, |
|
"grad_norm": 0.02262257970869541, |
|
"learning_rate": 3.1083360840455934e-07, |
|
"loss": 0.498, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.42987407729049065, |
|
"grad_norm": 0.024247439578175545, |
|
"learning_rate": 2.568918996560532e-07, |
|
"loss": 0.4885, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.430308293530178, |
|
"grad_norm": 0.024190355092287064, |
|
"learning_rate": 2.0808582455528192e-07, |
|
"loss": 0.541, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.4307425097698654, |
|
"grad_norm": 0.023794230073690414, |
|
"learning_rate": 1.6441588466009628e-07, |
|
"loss": 0.4616, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.43117672600955276, |
|
"grad_norm": 0.023178860545158386, |
|
"learning_rate": 1.2588252874673466e-07, |
|
"loss": 0.5077, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.4316109422492401, |
|
"grad_norm": 0.0218358151614666, |
|
"learning_rate": 9.248615280499361e-08, |
|
"loss": 0.517, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.4320451584889275, |
|
"grad_norm": 0.021379638463258743, |
|
"learning_rate": 6.422710003439747e-08, |
|
"loss": 0.4589, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.43247937472861486, |
|
"grad_norm": 0.024375783279538155, |
|
"learning_rate": 4.1105660840368156e-08, |
|
"loss": 0.4963, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.43291359096830223, |
|
"grad_norm": 0.02347472496330738, |
|
"learning_rate": 2.3122072831505136e-08, |
|
"loss": 0.4855, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.4333478072079896, |
|
"grad_norm": 0.02579214982688427, |
|
"learning_rate": 1.0276520816976388e-08, |
|
"loss": 0.4786, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.43378202344767697, |
|
"grad_norm": 0.023950695991516113, |
|
"learning_rate": 2.5691368046865116e-09, |
|
"loss": 0.5068, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.4342162396873643, |
|
"grad_norm": 0.022179216146469116, |
|
"learning_rate": 0.0, |
|
"loss": 0.4841, |
|
"step": 1000 |
|
}, |
|
{
"epoch": 0.4342162396873643,
"step": 1000,
"total_flos": 1.0100654119636173e+20,
"train_loss": 0.710578792899847,
"train_runtime": 20977.6715,
"train_samples_per_second": 0.763,
"train_steps_per_second": 0.048
}
],
"logging_steps": 1.0, |
|
"max_steps": 1000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.0100654119636173e+20, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |