{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9996375498368975,
  "eval_steps": 345,
  "global_step": 1379,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
{ |
|
"epoch": 0.0007249003262051468, |
|
"grad_norm": 44.75750397895495, |
|
"learning_rate": 4.7619047619047623e-07, |
|
"loss": 2.3443, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0014498006524102935, |
|
"grad_norm": 45.205818482220735, |
|
"learning_rate": 9.523809523809525e-07, |
|
"loss": 2.1445, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0021747009786154403, |
|
"grad_norm": 56.724033240854, |
|
"learning_rate": 1.4285714285714286e-06, |
|
"loss": 2.5876, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.002899601304820587, |
|
"grad_norm": 53.17611331537967, |
|
"learning_rate": 1.904761904761905e-06, |
|
"loss": 2.3983, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.003624501631025734, |
|
"grad_norm": 57.8664359648513, |
|
"learning_rate": 2.380952380952381e-06, |
|
"loss": 2.1637, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004349401957230881, |
|
"grad_norm": 14.246129965796554, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 1.9145, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.005074302283436027, |
|
"grad_norm": 11.159860869255928, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.7268, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.005799202609641174, |
|
"grad_norm": 8.887448463820203, |
|
"learning_rate": 3.80952380952381e-06, |
|
"loss": 1.5522, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.006524102935846321, |
|
"grad_norm": 6.753347075425067, |
|
"learning_rate": 4.2857142857142855e-06, |
|
"loss": 1.5248, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.007249003262051468, |
|
"grad_norm": 11.059402659834483, |
|
"learning_rate": 4.761904761904762e-06, |
|
"loss": 1.5304, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.007973903588256614, |
|
"grad_norm": 9.28275165512847, |
|
"learning_rate": 5.2380952380952384e-06, |
|
"loss": 1.4316, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.008698803914461761, |
|
"grad_norm": 4.714004999293759, |
|
"learning_rate": 5.7142857142857145e-06, |
|
"loss": 1.331, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.009423704240666908, |
|
"grad_norm": 4.708567041689236, |
|
"learning_rate": 6.1904761904761914e-06, |
|
"loss": 1.4035, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.010148604566872055, |
|
"grad_norm": 7.8340994412560665, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.4499, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.010873504893077202, |
|
"grad_norm": 5.7926127365771105, |
|
"learning_rate": 7.1428571428571436e-06, |
|
"loss": 1.2915, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.011598405219282348, |
|
"grad_norm": 4.338940169766625, |
|
"learning_rate": 7.61904761904762e-06, |
|
"loss": 1.2546, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.012323305545487495, |
|
"grad_norm": 5.3158492494198875, |
|
"learning_rate": 8.095238095238097e-06, |
|
"loss": 1.3437, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.013048205871692642, |
|
"grad_norm": 4.267050487387928, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 1.2076, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.013773106197897789, |
|
"grad_norm": 4.289501496589146, |
|
"learning_rate": 9.047619047619049e-06, |
|
"loss": 1.2095, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.014498006524102935, |
|
"grad_norm": 4.475865685170405, |
|
"learning_rate": 9.523809523809525e-06, |
|
"loss": 1.2499, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015222906850308082, |
|
"grad_norm": 4.341512982701414, |
|
"learning_rate": 1e-05, |
|
"loss": 1.3725, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01594780717651323, |
|
"grad_norm": 3.9777632208757443, |
|
"learning_rate": 1.0476190476190477e-05, |
|
"loss": 1.2679, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.016672707502718376, |
|
"grad_norm": 3.971780990603446, |
|
"learning_rate": 1.0952380952380955e-05, |
|
"loss": 1.3049, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.017397607828923523, |
|
"grad_norm": 4.355148301567439, |
|
"learning_rate": 1.1428571428571429e-05, |
|
"loss": 1.1696, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01812250815512867, |
|
"grad_norm": 5.700836460033174, |
|
"learning_rate": 1.1904761904761905e-05, |
|
"loss": 1.3116, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.018847408481333816, |
|
"grad_norm": 4.266676186480929, |
|
"learning_rate": 1.2380952380952383e-05, |
|
"loss": 1.2132, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.019572308807538963, |
|
"grad_norm": 4.685688044042141, |
|
"learning_rate": 1.2857142857142859e-05, |
|
"loss": 1.2743, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02029720913374411, |
|
"grad_norm": 3.8278571816897977, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 1.3062, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.021022109459949256, |
|
"grad_norm": 4.207466080048964, |
|
"learning_rate": 1.3809523809523811e-05, |
|
"loss": 1.2238, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.021747009786154403, |
|
"grad_norm": 3.772755917601343, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 1.2333, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02247191011235955, |
|
"grad_norm": 3.2903338340660095, |
|
"learning_rate": 1.4761904761904763e-05, |
|
"loss": 1.1223, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.023196810438564697, |
|
"grad_norm": 3.2688442989444746, |
|
"learning_rate": 1.523809523809524e-05, |
|
"loss": 1.1476, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.023921710764769843, |
|
"grad_norm": 3.6401775542496284, |
|
"learning_rate": 1.5714285714285715e-05, |
|
"loss": 1.1805, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02464661109097499, |
|
"grad_norm": 3.4329875974688004, |
|
"learning_rate": 1.6190476190476193e-05, |
|
"loss": 1.1406, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.025371511417180137, |
|
"grad_norm": 3.457570825026417, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.1447, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.026096411743385284, |
|
"grad_norm": 3.503397413974539, |
|
"learning_rate": 1.7142857142857142e-05, |
|
"loss": 1.13, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02682131206959043, |
|
"grad_norm": 2.880316116737165, |
|
"learning_rate": 1.761904761904762e-05, |
|
"loss": 1.2275, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.027546212395795577, |
|
"grad_norm": 3.4491978928897686, |
|
"learning_rate": 1.8095238095238097e-05, |
|
"loss": 1.2675, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.028271112722000724, |
|
"grad_norm": 3.4801420565131695, |
|
"learning_rate": 1.8571428571428575e-05, |
|
"loss": 1.1656, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.02899601304820587, |
|
"grad_norm": 3.154782654672038, |
|
"learning_rate": 1.904761904761905e-05, |
|
"loss": 1.157, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.029720913374411018, |
|
"grad_norm": 3.0915926527583633, |
|
"learning_rate": 1.9523809523809524e-05, |
|
"loss": 1.1432, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.030445813700616164, |
|
"grad_norm": 2.772154104430272, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0994, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03117071402682131, |
|
"grad_norm": 3.0517198878965304, |
|
"learning_rate": 1.9999972393793303e-05, |
|
"loss": 1.2549, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03189561435302646, |
|
"grad_norm": 2.6750901532162987, |
|
"learning_rate": 1.9999889575325633e-05, |
|
"loss": 1.143, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.032620514679231605, |
|
"grad_norm": 3.6101390652119516, |
|
"learning_rate": 1.9999751545054247e-05, |
|
"loss": 1.2469, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03334541500543675, |
|
"grad_norm": 2.843300653538732, |
|
"learning_rate": 1.9999558303741244e-05, |
|
"loss": 1.061, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0340703153316419, |
|
"grad_norm": 3.9392334069856907, |
|
"learning_rate": 1.9999309852453556e-05, |
|
"loss": 1.2809, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.034795215657847045, |
|
"grad_norm": 3.773339147888779, |
|
"learning_rate": 1.9999006192562948e-05, |
|
"loss": 1.3008, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03552011598405219, |
|
"grad_norm": 2.7832080749846453, |
|
"learning_rate": 1.9998647325745995e-05, |
|
"loss": 1.1342, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03624501631025734, |
|
"grad_norm": 3.7382629441207667, |
|
"learning_rate": 1.9998233253984088e-05, |
|
"loss": 1.2641, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.036969916636462485, |
|
"grad_norm": 2.799440280567736, |
|
"learning_rate": 1.9997763979563418e-05, |
|
"loss": 1.2242, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.03769481696266763, |
|
"grad_norm": 3.3316333443131696, |
|
"learning_rate": 1.9997239505074954e-05, |
|
"loss": 1.2312, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.03841971728887278, |
|
"grad_norm": 5.725082920380033, |
|
"learning_rate": 1.999665983341446e-05, |
|
"loss": 1.4632, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.039144617615077926, |
|
"grad_norm": 2.553768110728692, |
|
"learning_rate": 1.9996024967782436e-05, |
|
"loss": 1.2164, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.03986951794128307, |
|
"grad_norm": 3.041402503331704, |
|
"learning_rate": 1.9995334911684127e-05, |
|
"loss": 1.2156, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04059441826748822, |
|
"grad_norm": 2.595475331293293, |
|
"learning_rate": 1.99945896689295e-05, |
|
"loss": 1.1247, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.041319318593693366, |
|
"grad_norm": 2.939590634487055, |
|
"learning_rate": 1.9993789243633227e-05, |
|
"loss": 1.2171, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04204421891989851, |
|
"grad_norm": 2.6474772321680606, |
|
"learning_rate": 1.999293364021464e-05, |
|
"loss": 1.187, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.04276911924610366, |
|
"grad_norm": 2.901881436981748, |
|
"learning_rate": 1.9992022863397737e-05, |
|
"loss": 1.1224, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.043494019572308806, |
|
"grad_norm": 2.9320116081953835, |
|
"learning_rate": 1.999105691821113e-05, |
|
"loss": 1.1692, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04421891989851395, |
|
"grad_norm": 3.5247973111889004, |
|
"learning_rate": 1.9990035809988045e-05, |
|
"loss": 1.1575, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0449438202247191, |
|
"grad_norm": 2.669047070211342, |
|
"learning_rate": 1.9988959544366265e-05, |
|
"loss": 1.1433, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04566872055092425, |
|
"grad_norm": 3.213177659539329, |
|
"learning_rate": 1.9987828127288105e-05, |
|
"loss": 1.2635, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04639362087712939, |
|
"grad_norm": 3.3559565945133554, |
|
"learning_rate": 1.99866415650004e-05, |
|
"loss": 1.3022, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.04711852120333454, |
|
"grad_norm": 2.3670279547756894, |
|
"learning_rate": 1.998539986405444e-05, |
|
"loss": 1.0142, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04784342152953969, |
|
"grad_norm": 3.037710920162538, |
|
"learning_rate": 1.998410303130596e-05, |
|
"loss": 1.2591, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.048568321855744834, |
|
"grad_norm": 2.865611039939188, |
|
"learning_rate": 1.9982751073915087e-05, |
|
"loss": 1.1993, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.04929322218194998, |
|
"grad_norm": 3.48123837358242, |
|
"learning_rate": 1.9981343999346302e-05, |
|
"loss": 1.2121, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.05001812250815513, |
|
"grad_norm": 3.2299518093080977, |
|
"learning_rate": 1.9979881815368406e-05, |
|
"loss": 1.2152, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.050743022834360274, |
|
"grad_norm": 3.0034023909101233, |
|
"learning_rate": 1.9978364530054465e-05, |
|
"loss": 1.1339, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05146792316056542, |
|
"grad_norm": 3.4428257188696794, |
|
"learning_rate": 1.9976792151781778e-05, |
|
"loss": 1.2363, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.05219282348677057, |
|
"grad_norm": 2.9622416635006408, |
|
"learning_rate": 1.997516468923183e-05, |
|
"loss": 1.274, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.052917723812975714, |
|
"grad_norm": 2.5702004303735455, |
|
"learning_rate": 1.9973482151390228e-05, |
|
"loss": 1.1873, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.05364262413918086, |
|
"grad_norm": 3.3076211834809492, |
|
"learning_rate": 1.9971744547546676e-05, |
|
"loss": 1.228, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.05436752446538601, |
|
"grad_norm": 2.6091996272999243, |
|
"learning_rate": 1.99699518872949e-05, |
|
"loss": 1.0701, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.055092424791591155, |
|
"grad_norm": 2.8268323348853044, |
|
"learning_rate": 1.996810418053261e-05, |
|
"loss": 1.2859, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.0558173251177963, |
|
"grad_norm": 2.5281726771474244, |
|
"learning_rate": 1.996620143746144e-05, |
|
"loss": 1.1387, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.05654222544400145, |
|
"grad_norm": 2.300493492648369, |
|
"learning_rate": 1.99642436685869e-05, |
|
"loss": 1.0669, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.057267125770206595, |
|
"grad_norm": 3.493223728132168, |
|
"learning_rate": 1.9962230884718296e-05, |
|
"loss": 1.1832, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.05799202609641174, |
|
"grad_norm": 2.7188006009604293, |
|
"learning_rate": 1.9960163096968702e-05, |
|
"loss": 1.1356, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05871692642261689, |
|
"grad_norm": 2.7364057030270956, |
|
"learning_rate": 1.9958040316754866e-05, |
|
"loss": 1.2446, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.059441826748822035, |
|
"grad_norm": 2.7307874925286475, |
|
"learning_rate": 1.995586255579717e-05, |
|
"loss": 1.1808, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.06016672707502718, |
|
"grad_norm": 2.4215808753597226, |
|
"learning_rate": 1.9953629826119562e-05, |
|
"loss": 1.1579, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.06089162740123233, |
|
"grad_norm": 2.631267713200398, |
|
"learning_rate": 1.9951342140049483e-05, |
|
"loss": 1.1941, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.061616527727437476, |
|
"grad_norm": 2.673115849546717, |
|
"learning_rate": 1.994899951021779e-05, |
|
"loss": 1.2284, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06234142805364262, |
|
"grad_norm": 2.6934406037224807, |
|
"learning_rate": 1.9946601949558722e-05, |
|
"loss": 1.206, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06306632837984777, |
|
"grad_norm": 3.7302320786317673, |
|
"learning_rate": 1.9944149471309777e-05, |
|
"loss": 1.2247, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.06379122870605292, |
|
"grad_norm": 2.4550165493932363, |
|
"learning_rate": 1.9941642089011684e-05, |
|
"loss": 1.0596, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06451612903225806, |
|
"grad_norm": 2.868687433143366, |
|
"learning_rate": 1.9939079816508312e-05, |
|
"loss": 1.1435, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06524102935846321, |
|
"grad_norm": 3.0984151506536586, |
|
"learning_rate": 1.9936462667946574e-05, |
|
"loss": 1.1143, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06596592968466836, |
|
"grad_norm": 2.483528934101709, |
|
"learning_rate": 1.993379065777639e-05, |
|
"loss": 1.1165, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.0666908300108735, |
|
"grad_norm": 2.4080631755195925, |
|
"learning_rate": 1.9931063800750564e-05, |
|
"loss": 1.1144, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.06741573033707865, |
|
"grad_norm": 2.8973853952983375, |
|
"learning_rate": 1.9928282111924736e-05, |
|
"loss": 1.2365, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0681406306632838, |
|
"grad_norm": 3.3833221119081656, |
|
"learning_rate": 1.9925445606657286e-05, |
|
"loss": 1.2146, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.06886553098948894, |
|
"grad_norm": 2.908934165124817, |
|
"learning_rate": 1.9922554300609237e-05, |
|
"loss": 1.1916, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.06959043131569409, |
|
"grad_norm": 2.512496806932867, |
|
"learning_rate": 1.991960820974419e-05, |
|
"loss": 1.0935, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.07031533164189924, |
|
"grad_norm": 2.825963677244521, |
|
"learning_rate": 1.991660735032822e-05, |
|
"loss": 1.0931, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.07104023196810438, |
|
"grad_norm": 2.8818264863814296, |
|
"learning_rate": 1.9913551738929803e-05, |
|
"loss": 1.2153, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.07176513229430953, |
|
"grad_norm": 2.7275750720550147, |
|
"learning_rate": 1.9910441392419702e-05, |
|
"loss": 1.193, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.07249003262051468, |
|
"grad_norm": 3.462468824391245, |
|
"learning_rate": 1.9907276327970892e-05, |
|
"loss": 1.1469, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07321493294671982, |
|
"grad_norm": 3.3377754324869224, |
|
"learning_rate": 1.990405656305846e-05, |
|
"loss": 1.2357, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.07393983327292497, |
|
"grad_norm": 2.839009968658976, |
|
"learning_rate": 1.9900782115459503e-05, |
|
"loss": 1.1848, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07466473359913012, |
|
"grad_norm": 2.1391099705614063, |
|
"learning_rate": 1.9897453003253035e-05, |
|
"loss": 1.1225, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.07538963392533526, |
|
"grad_norm": 2.7719189493972243, |
|
"learning_rate": 1.9894069244819893e-05, |
|
"loss": 1.1538, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.07611453425154041, |
|
"grad_norm": 2.3940421507214933, |
|
"learning_rate": 1.9890630858842614e-05, |
|
"loss": 1.2852, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07683943457774556, |
|
"grad_norm": 2.341837519488398, |
|
"learning_rate": 1.9887137864305365e-05, |
|
"loss": 1.1116, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.0775643349039507, |
|
"grad_norm": 2.403845819495617, |
|
"learning_rate": 1.9883590280493815e-05, |
|
"loss": 1.1509, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.07828923523015585, |
|
"grad_norm": 3.348129496258059, |
|
"learning_rate": 1.9879988126995023e-05, |
|
"loss": 1.1837, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.079014135556361, |
|
"grad_norm": 2.591536638052907, |
|
"learning_rate": 1.9876331423697345e-05, |
|
"loss": 1.1223, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.07973903588256614, |
|
"grad_norm": 2.3592029882899648, |
|
"learning_rate": 1.9872620190790334e-05, |
|
"loss": 1.143, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08046393620877129, |
|
"grad_norm": 2.1392569469392346, |
|
"learning_rate": 1.9868854448764594e-05, |
|
"loss": 1.0607, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.08118883653497644, |
|
"grad_norm": 2.863527881373612, |
|
"learning_rate": 1.9865034218411698e-05, |
|
"loss": 1.1717, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.08191373686118159, |
|
"grad_norm": 2.924899234680409, |
|
"learning_rate": 1.986115952082406e-05, |
|
"loss": 1.2318, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.08263863718738673, |
|
"grad_norm": 2.433809686371776, |
|
"learning_rate": 1.985723037739482e-05, |
|
"loss": 1.2658, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.08336353751359188, |
|
"grad_norm": 2.8856283656848905, |
|
"learning_rate": 1.9853246809817725e-05, |
|
"loss": 1.3125, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.08408843783979703, |
|
"grad_norm": 2.5486082662488627, |
|
"learning_rate": 1.9849208840087018e-05, |
|
"loss": 1.1655, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.08481333816600217, |
|
"grad_norm": 2.769678541141219, |
|
"learning_rate": 1.98451164904973e-05, |
|
"loss": 1.1953, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.08553823849220732, |
|
"grad_norm": 2.473525065101367, |
|
"learning_rate": 1.984096978364342e-05, |
|
"loss": 1.1201, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.08626313881841247, |
|
"grad_norm": 2.7533405016925583, |
|
"learning_rate": 1.9836768742420355e-05, |
|
"loss": 1.2437, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.08698803914461761, |
|
"grad_norm": 2.4288996024121605, |
|
"learning_rate": 1.983251339002306e-05, |
|
"loss": 1.2333, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.08771293947082276, |
|
"grad_norm": 2.8989781107762616, |
|
"learning_rate": 1.9828203749946362e-05, |
|
"loss": 1.1184, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.0884378397970279, |
|
"grad_norm": 2.3092483374926527, |
|
"learning_rate": 1.982383984598483e-05, |
|
"loss": 1.0504, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.08916274012323305, |
|
"grad_norm": 2.714849936506903, |
|
"learning_rate": 1.9819421702232624e-05, |
|
"loss": 1.2218, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.0898876404494382, |
|
"grad_norm": 2.8105161825117286, |
|
"learning_rate": 1.9814949343083383e-05, |
|
"loss": 1.0007, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.09061254077564335, |
|
"grad_norm": 4.147738573133108, |
|
"learning_rate": 1.981042279323009e-05, |
|
"loss": 1.2543, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0913374411018485, |
|
"grad_norm": 2.8310310979565783, |
|
"learning_rate": 1.9805842077664913e-05, |
|
"loss": 1.146, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.09206234142805364, |
|
"grad_norm": 3.0754511920919376, |
|
"learning_rate": 1.9801207221679085e-05, |
|
"loss": 1.203, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.09278724175425879, |
|
"grad_norm": 2.7063407512582787, |
|
"learning_rate": 1.979651825086277e-05, |
|
"loss": 1.0783, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.09351214208046393, |
|
"grad_norm": 2.511083096485902, |
|
"learning_rate": 1.97917751911049e-05, |
|
"loss": 1.1531, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.09423704240666908, |
|
"grad_norm": 2.4741075426355614, |
|
"learning_rate": 1.9786978068593062e-05, |
|
"loss": 1.1428, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09496194273287423, |
|
"grad_norm": 2.5626898456386815, |
|
"learning_rate": 1.978212690981332e-05, |
|
"loss": 1.1239, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.09568684305907937, |
|
"grad_norm": 2.2391021429874263, |
|
"learning_rate": 1.9777221741550096e-05, |
|
"loss": 1.0702, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.09641174338528452, |
|
"grad_norm": 2.4369139499586074, |
|
"learning_rate": 1.9772262590886006e-05, |
|
"loss": 1.1692, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.09713664371148967, |
|
"grad_norm": 2.7781914212064747, |
|
"learning_rate": 1.976724948520172e-05, |
|
"loss": 1.1412, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.09786154403769481, |
|
"grad_norm": 2.5311023412287623, |
|
"learning_rate": 1.9762182452175806e-05, |
|
"loss": 1.2879, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.09858644436389996, |
|
"grad_norm": 3.2212943074374327, |
|
"learning_rate": 1.9757061519784568e-05, |
|
"loss": 1.1971, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.09931134469010511, |
|
"grad_norm": 2.549155447627049, |
|
"learning_rate": 1.9751886716301914e-05, |
|
"loss": 1.1439, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.10003624501631025, |
|
"grad_norm": 2.59925115990486, |
|
"learning_rate": 1.9746658070299188e-05, |
|
"loss": 1.1126, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1007611453425154, |
|
"grad_norm": 3.1442581997348364, |
|
"learning_rate": 1.9741375610645e-05, |
|
"loss": 1.166, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.10148604566872055, |
|
"grad_norm": 2.7390978310814456, |
|
"learning_rate": 1.9736039366505087e-05, |
|
"loss": 1.1593, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1022109459949257, |
|
"grad_norm": 2.68099301776624, |
|
"learning_rate": 1.9730649367342137e-05, |
|
"loss": 1.091, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.10293584632113084, |
|
"grad_norm": 2.5055718294224474, |
|
"learning_rate": 1.9725205642915645e-05, |
|
"loss": 1.1567, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.10366074664733599, |
|
"grad_norm": 2.4806609688516064, |
|
"learning_rate": 1.971970822328172e-05, |
|
"loss": 1.1877, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.10438564697354114, |
|
"grad_norm": 2.446019560687823, |
|
"learning_rate": 1.9714157138792946e-05, |
|
"loss": 0.8962, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.10511054729974628, |
|
"grad_norm": 2.6145362990605006, |
|
"learning_rate": 1.9708552420098196e-05, |
|
"loss": 1.1652, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.10583544762595143, |
|
"grad_norm": 2.8215724838180303, |
|
"learning_rate": 1.9702894098142476e-05, |
|
"loss": 1.1084, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.10656034795215658, |
|
"grad_norm": 2.3497120281958503, |
|
"learning_rate": 1.969718220416675e-05, |
|
"loss": 1.167, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.10728524827836172, |
|
"grad_norm": 2.583947968920025, |
|
"learning_rate": 1.9691416769707763e-05, |
|
"loss": 1.1908, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.10801014860456687, |
|
"grad_norm": 2.751597734547836, |
|
"learning_rate": 1.968559782659787e-05, |
|
"loss": 1.2345, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.10873504893077202, |
|
"grad_norm": 2.6976314838665685, |
|
"learning_rate": 1.9679725406964853e-05, |
|
"loss": 1.1939, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.10945994925697716, |
|
"grad_norm": 2.1954623751957905, |
|
"learning_rate": 1.967379954323177e-05, |
|
"loss": 1.1555, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.11018484958318231, |
|
"grad_norm": 2.7043983207505624, |
|
"learning_rate": 1.9667820268116732e-05, |
|
"loss": 1.1899, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.11090974990938746, |
|
"grad_norm": 2.076332249665599, |
|
"learning_rate": 1.966178761463277e-05, |
|
"loss": 1.1009, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.1116346502355926, |
|
"grad_norm": 2.3441526940711097, |
|
"learning_rate": 1.965570161608762e-05, |
|
"loss": 1.1319, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.11235955056179775, |
|
"grad_norm": 2.2285061151426113, |
|
"learning_rate": 1.964956230608354e-05, |
|
"loss": 1.1211, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.1130844508880029, |
|
"grad_norm": 2.3200577550605064, |
|
"learning_rate": 1.9643369718517152e-05, |
|
"loss": 1.1515, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.11380935121420804, |
|
"grad_norm": 2.5448094549925844, |
|
"learning_rate": 1.9637123887579217e-05, |
|
"loss": 1.302, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.11453425154041319, |
|
"grad_norm": 2.1389116260296714, |
|
"learning_rate": 1.963082484775448e-05, |
|
"loss": 1.1653, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.11525915186661834, |
|
"grad_norm": 2.752841944773758, |
|
"learning_rate": 1.9624472633821464e-05, |
|
"loss": 1.1178, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.11598405219282348, |
|
"grad_norm": 2.3858753840621345, |
|
"learning_rate": 1.961806728085227e-05, |
|
"loss": 1.0138, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.11670895251902863, |
|
"grad_norm": 2.6783021219358694, |
|
"learning_rate": 1.9611608824212395e-05, |
|
"loss": 1.2371, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.11743385284523378, |
|
"grad_norm": 2.626777858203357, |
|
"learning_rate": 1.9605097299560545e-05, |
|
"loss": 1.0034, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.11815875317143892, |
|
"grad_norm": 2.333729825834433, |
|
"learning_rate": 1.959853274284841e-05, |
|
"loss": 1.1103, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.11888365349764407, |
|
"grad_norm": 2.5562196927508425, |
|
"learning_rate": 1.95919151903205e-05, |
|
"loss": 1.2076, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.11960855382384922, |
|
"grad_norm": 3.2522325367194425, |
|
"learning_rate": 1.9585244678513913e-05, |
|
"loss": 1.258, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.12033345415005436, |
|
"grad_norm": 3.792145378660367, |
|
"learning_rate": 1.9578521244258162e-05, |
|
"loss": 1.1639, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.12105835447625951, |
|
"grad_norm": 3.4172955897048207, |
|
"learning_rate": 1.9571744924674943e-05, |
|
"loss": 1.1899, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.12178325480246466, |
|
"grad_norm": 2.884712209279333, |
|
"learning_rate": 1.9564915757177955e-05, |
|
"loss": 1.1658, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.1225081551286698, |
|
"grad_norm": 2.584866024173676, |
|
"learning_rate": 1.955803377947268e-05, |
|
"loss": 1.1995, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.12323305545487495, |
|
"grad_norm": 2.608204074681373, |
|
"learning_rate": 1.955109902955617e-05, |
|
"loss": 1.1891, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1239579557810801, |
|
"grad_norm": 3.021997715506911, |
|
"learning_rate": 1.954411154571687e-05, |
|
"loss": 1.1485, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.12468285610728524, |
|
"grad_norm": 2.136848876157592, |
|
"learning_rate": 1.9537071366534352e-05, |
|
"loss": 1.0598, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.1254077564334904, |
|
"grad_norm": 2.4835581901419332, |
|
"learning_rate": 1.9529978530879144e-05, |
|
"loss": 1.1085, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.12613265675969554, |
|
"grad_norm": 2.3827179769931823, |
|
"learning_rate": 1.9522833077912512e-05, |
|
"loss": 1.1484, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.12685755708590069, |
|
"grad_norm": 2.256838168033284, |
|
"learning_rate": 1.951563504708622e-05, |
|
"loss": 1.1174, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.12758245741210583, |
|
"grad_norm": 2.677664336590985, |
|
"learning_rate": 1.950838447814233e-05, |
|
"loss": 1.1908, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.12830735773831098, |
|
"grad_norm": 2.851750128481314, |
|
"learning_rate": 1.9501081411112987e-05, |
|
"loss": 1.2887, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.12903225806451613, |
|
"grad_norm": 2.299218668392714, |
|
"learning_rate": 1.9493725886320193e-05, |
|
"loss": 1.0708, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.12975715839072127, |
|
"grad_norm": 2.654535712782758, |
|
"learning_rate": 1.9486317944375563e-05, |
|
"loss": 1.1428, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.13048205871692642, |
|
"grad_norm": 3.1562180739485184, |
|
"learning_rate": 1.947885762618014e-05, |
|
"loss": 1.1446, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13120695904313157, |
|
"grad_norm": 2.4022359841083762, |
|
"learning_rate": 1.9471344972924142e-05, |
|
"loss": 1.1913, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1319318593693367, |
|
"grad_norm": 1.9874430354522221, |
|
"learning_rate": 1.9463780026086735e-05, |
|
"loss": 1.1536, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.13265675969554186, |
|
"grad_norm": 2.6028657856098762, |
|
"learning_rate": 1.945616282743582e-05, |
|
"loss": 1.0459, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.133381660021747, |
|
"grad_norm": 2.4961285769674584, |
|
"learning_rate": 1.9448493419027795e-05, |
|
"loss": 1.2184, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.13410656034795215, |
|
"grad_norm": 2.4545736340279607, |
|
"learning_rate": 1.9440771843207305e-05, |
|
"loss": 1.0966, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1348314606741573, |
|
"grad_norm": 2.4064312104977796, |
|
"learning_rate": 1.9432998142607036e-05, |
|
"loss": 1.1427, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.13555636100036245, |
|
"grad_norm": 2.10260997961401, |
|
"learning_rate": 1.9425172360147467e-05, |
|
"loss": 1.153, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.1362812613265676, |
|
"grad_norm": 2.2184620974349, |
|
"learning_rate": 1.9417294539036634e-05, |
|
"loss": 1.1356, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.13700616165277274, |
|
"grad_norm": 2.715658298293896, |
|
"learning_rate": 1.9409364722769882e-05, |
|
"loss": 1.2261, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.1377310619789779, |
|
"grad_norm": 2.240649838269821, |
|
"learning_rate": 1.9401382955129646e-05, |
|
"loss": 1.0599, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.13845596230518303, |
|
"grad_norm": 3.0178674865097728, |
|
"learning_rate": 1.9393349280185187e-05, |
|
"loss": 1.2233, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.13918086263138818, |
|
"grad_norm": 2.132340826672663, |
|
"learning_rate": 1.9385263742292368e-05, |
|
"loss": 1.0894, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.13990576295759333, |
|
"grad_norm": 2.4647699353823733, |
|
"learning_rate": 1.937712638609339e-05, |
|
"loss": 1.2108, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.14063066328379847, |
|
"grad_norm": 2.4490272195884852, |
|
"learning_rate": 1.9368937256516567e-05, |
|
"loss": 1.1608, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.14135556361000362, |
|
"grad_norm": 2.1989447827390665, |
|
"learning_rate": 1.9360696398776056e-05, |
|
"loss": 1.1457, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.14208046393620877, |
|
"grad_norm": 2.068383189183824, |
|
"learning_rate": 1.9352403858371618e-05, |
|
"loss": 1.1081, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.14280536426241391, |
|
"grad_norm": 2.0311982289386474, |
|
"learning_rate": 1.9344059681088372e-05, |
|
"loss": 1.1035, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.14353026458861906, |
|
"grad_norm": 2.486963653282551, |
|
"learning_rate": 1.933566391299654e-05, |
|
"loss": 1.0822, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.1442551649148242, |
|
"grad_norm": 2.3367902123479314, |
|
"learning_rate": 1.9327216600451177e-05, |
|
"loss": 1.017, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.14498006524102935, |
|
"grad_norm": 2.390926126929064, |
|
"learning_rate": 1.931871779009194e-05, |
|
"loss": 1.0842, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1457049655672345, |
|
"grad_norm": 3.3906627893855936, |
|
"learning_rate": 1.9310167528842808e-05, |
|
"loss": 1.2239, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.14642986589343965, |
|
"grad_norm": 2.36160531790082, |
|
"learning_rate": 1.9301565863911836e-05, |
|
"loss": 1.1343, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.1471547662196448, |
|
"grad_norm": 2.4172767818260885, |
|
"learning_rate": 1.9292912842790893e-05, |
|
"loss": 1.1009, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.14787966654584994, |
|
"grad_norm": 2.391111956498114, |
|
"learning_rate": 1.92842085132554e-05, |
|
"loss": 1.1245, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.1486045668720551, |
|
"grad_norm": 2.9512769517526527, |
|
"learning_rate": 1.927545292336406e-05, |
|
"loss": 1.1341, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.14932946719826024, |
|
"grad_norm": 2.247325705969668, |
|
"learning_rate": 1.9266646121458597e-05, |
|
"loss": 1.1099, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.15005436752446538, |
|
"grad_norm": 4.45573844838241, |
|
"learning_rate": 1.9257788156163484e-05, |
|
"loss": 1.2177, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.15077926785067053, |
|
"grad_norm": 2.308206270902384, |
|
"learning_rate": 1.9248879076385696e-05, |
|
"loss": 1.1115, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.15150416817687568, |
|
"grad_norm": 2.655262555875558, |
|
"learning_rate": 1.9239918931314404e-05, |
|
"loss": 1.1081, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.15222906850308082, |
|
"grad_norm": 2.939655738673358, |
|
"learning_rate": 1.9230907770420737e-05, |
|
"loss": 1.1192, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.15295396882928597, |
|
"grad_norm": 2.7252753733179564, |
|
"learning_rate": 1.9221845643457485e-05, |
|
"loss": 1.1471, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.15367886915549112, |
|
"grad_norm": 2.7708259653245677, |
|
"learning_rate": 1.9212732600458845e-05, |
|
"loss": 1.0776, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.15440376948169626, |
|
"grad_norm": 1.9749460870383841, |
|
"learning_rate": 1.9203568691740115e-05, |
|
"loss": 1.0709, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.1551286698079014, |
|
"grad_norm": 2.0889553423500353, |
|
"learning_rate": 1.9194353967897453e-05, |
|
"loss": 1.1986, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.15585357013410656, |
|
"grad_norm": 2.5179546631862624, |
|
"learning_rate": 1.9185088479807577e-05, |
|
"loss": 1.2635, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.1565784704603117, |
|
"grad_norm": 2.7700789430710424, |
|
"learning_rate": 1.9175772278627477e-05, |
|
"loss": 1.2196, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.15730337078651685, |
|
"grad_norm": 2.3103773826109313, |
|
"learning_rate": 1.916640541579415e-05, |
|
"loss": 1.1924, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.158028271112722, |
|
"grad_norm": 3.7179435673997587, |
|
"learning_rate": 1.9156987943024305e-05, |
|
"loss": 1.0714, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.15875317143892714, |
|
"grad_norm": 3.047980163715747, |
|
"learning_rate": 1.9147519912314086e-05, |
|
"loss": 1.0348, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.1594780717651323, |
|
"grad_norm": 2.3355941636809323, |
|
"learning_rate": 1.9138001375938773e-05, |
|
"loss": 1.0714, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.16020297209133744, |
|
"grad_norm": 2.4367237951314764, |
|
"learning_rate": 1.9128432386452503e-05, |
|
"loss": 1.0923, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.16092787241754258, |
|
"grad_norm": 2.534850993992721, |
|
"learning_rate": 1.9118812996687975e-05, |
|
"loss": 1.2025, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.16165277274374773, |
|
"grad_norm": 2.6850585710213637, |
|
"learning_rate": 1.910914325975616e-05, |
|
"loss": 1.2315, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.16237767306995288, |
|
"grad_norm": 2.6390954960986, |
|
"learning_rate": 1.9099423229046015e-05, |
|
"loss": 1.171, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.16310257339615802, |
|
"grad_norm": 3.674303992733298, |
|
"learning_rate": 1.908965295822417e-05, |
|
"loss": 1.22, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.16382747372236317, |
|
"grad_norm": 2.3198272055347413, |
|
"learning_rate": 1.907983250123465e-05, |
|
"loss": 0.9999, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.16455237404856832, |
|
"grad_norm": 2.367941394706144, |
|
"learning_rate": 1.906996191229857e-05, |
|
"loss": 1.1324, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.16527727437477346, |
|
"grad_norm": 2.2348457223954536, |
|
"learning_rate": 1.906004124591383e-05, |
|
"loss": 1.1695, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.1660021747009786, |
|
"grad_norm": 2.5357413278993373, |
|
"learning_rate": 1.905007055685483e-05, |
|
"loss": 1.2143, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.16672707502718376, |
|
"grad_norm": 2.197512694165324, |
|
"learning_rate": 1.904004990017214e-05, |
|
"loss": 1.059, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1674519753533889, |
|
"grad_norm": 2.246714216934824, |
|
"learning_rate": 1.902997933119223e-05, |
|
"loss": 1.0306, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.16817687567959405, |
|
"grad_norm": 2.3508232503422257, |
|
"learning_rate": 1.9019858905517146e-05, |
|
"loss": 1.232, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.1689017760057992, |
|
"grad_norm": 3.2397999719968706, |
|
"learning_rate": 1.900968867902419e-05, |
|
"loss": 1.1642, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.16962667633200434, |
|
"grad_norm": 2.2082937384067134, |
|
"learning_rate": 1.899946870786565e-05, |
|
"loss": 1.0615, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.1703515766582095, |
|
"grad_norm": 2.6489946010926437, |
|
"learning_rate": 1.8989199048468443e-05, |
|
"loss": 1.2625, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.17107647698441464, |
|
"grad_norm": 2.243711710470245, |
|
"learning_rate": 1.8978879757533838e-05, |
|
"loss": 1.1316, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.17180137731061979, |
|
"grad_norm": 2.846092716410688, |
|
"learning_rate": 1.8968510892037136e-05, |
|
"loss": 1.2307, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.17252627763682493, |
|
"grad_norm": 2.7609914990765128, |
|
"learning_rate": 1.8958092509227347e-05, |
|
"loss": 1.1384, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.17325117796303008, |
|
"grad_norm": 1.8868238757930658, |
|
"learning_rate": 1.8947624666626866e-05, |
|
"loss": 1.0692, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.17397607828923523, |
|
"grad_norm": 2.5980341998176146, |
|
"learning_rate": 1.893710742203119e-05, |
|
"loss": 1.1284, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.17470097861544037, |
|
"grad_norm": 2.7513723878066982, |
|
"learning_rate": 1.8926540833508557e-05, |
|
"loss": 1.0836, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.17542587894164552, |
|
"grad_norm": 2.6788196765201535, |
|
"learning_rate": 1.8915924959399656e-05, |
|
"loss": 1.1034, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.17615077926785067, |
|
"grad_norm": 2.5118625400183023, |
|
"learning_rate": 1.8905259858317287e-05, |
|
"loss": 1.2339, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.1768756795940558, |
|
"grad_norm": 2.126239831693274, |
|
"learning_rate": 1.889454558914605e-05, |
|
"loss": 1.0637, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.17760057992026096, |
|
"grad_norm": 2.604602180586931, |
|
"learning_rate": 1.888378221104201e-05, |
|
"loss": 1.2855, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1783254802464661, |
|
"grad_norm": 2.4753012194229562, |
|
"learning_rate": 1.8872969783432376e-05, |
|
"loss": 1.0602, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.17905038057267125, |
|
"grad_norm": 2.6860064054799584, |
|
"learning_rate": 1.886210836601517e-05, |
|
"loss": 1.1418, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.1797752808988764, |
|
"grad_norm": 2.5126660296525833, |
|
"learning_rate": 1.8851198018758898e-05, |
|
"loss": 1.1937, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.18050018122508155, |
|
"grad_norm": 1.7715675237469672, |
|
"learning_rate": 1.884023880190222e-05, |
|
"loss": 1.0683, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.1812250815512867, |
|
"grad_norm": 2.3549127021241048, |
|
"learning_rate": 1.8829230775953616e-05, |
|
"loss": 1.1331, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.18194998187749184, |
|
"grad_norm": 2.043319865990913, |
|
"learning_rate": 1.8818174001691055e-05, |
|
"loss": 1.009, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.182674882203697, |
|
"grad_norm": 2.5088376984194682, |
|
"learning_rate": 1.880706854016166e-05, |
|
"loss": 1.0838, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.18339978252990213, |
|
"grad_norm": 2.095009396399693, |
|
"learning_rate": 1.8795914452681362e-05, |
|
"loss": 1.0487, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.18412468285610728, |
|
"grad_norm": 2.3982978050324073, |
|
"learning_rate": 1.8784711800834564e-05, |
|
"loss": 1.0634, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.18484958318231243, |
|
"grad_norm": 2.1820663845429524, |
|
"learning_rate": 1.877346064647382e-05, |
|
"loss": 1.0621, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.18557448350851757, |
|
"grad_norm": 2.9905602073691147, |
|
"learning_rate": 1.8762161051719464e-05, |
|
"loss": 1.2322, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.18629938383472272, |
|
"grad_norm": 2.2435538559731505, |
|
"learning_rate": 1.8750813078959282e-05, |
|
"loss": 1.0788, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.18702428416092787, |
|
"grad_norm": 2.2367444808107906, |
|
"learning_rate": 1.8739416790848177e-05, |
|
"loss": 1.0598, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.18774918448713301, |
|
"grad_norm": 2.472646715254004, |
|
"learning_rate": 1.87279722503078e-05, |
|
"loss": 1.0482, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.18847408481333816, |
|
"grad_norm": 2.111989985490911, |
|
"learning_rate": 1.8716479520526227e-05, |
|
"loss": 1.1027, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1891989851395433, |
|
"grad_norm": 2.510087294479276, |
|
"learning_rate": 1.870493866495759e-05, |
|
"loss": 1.1856, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.18992388546574845, |
|
"grad_norm": 2.5133146185218904, |
|
"learning_rate": 1.8693349747321737e-05, |
|
"loss": 1.1478, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.1906487857919536, |
|
"grad_norm": 2.3395156482299972, |
|
"learning_rate": 1.868171283160388e-05, |
|
"loss": 1.1815, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.19137368611815875, |
|
"grad_norm": 2.3764168972959707, |
|
"learning_rate": 1.867002798205424e-05, |
|
"loss": 1.2432, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.1920985864443639, |
|
"grad_norm": 2.559923113432804, |
|
"learning_rate": 1.8658295263187692e-05, |
|
"loss": 1.1154, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.19282348677056904, |
|
"grad_norm": 2.8783556981475544, |
|
"learning_rate": 1.8646514739783404e-05, |
|
"loss": 1.1236, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.1935483870967742, |
|
"grad_norm": 2.6502413673324985, |
|
"learning_rate": 1.8634686476884497e-05, |
|
"loss": 1.2676, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.19427328742297933, |
|
"grad_norm": 3.3520733595809857, |
|
"learning_rate": 1.8622810539797656e-05, |
|
"loss": 1.0714, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.19499818774918448, |
|
"grad_norm": 2.9157405583997353, |
|
"learning_rate": 1.8610886994092802e-05, |
|
"loss": 1.1882, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.19572308807538963, |
|
"grad_norm": 2.118069033541846, |
|
"learning_rate": 1.8598915905602704e-05, |
|
"loss": 1.1082, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.19644798840159478, |
|
"grad_norm": 2.3662623126360693, |
|
"learning_rate": 1.858689734042263e-05, |
|
"loss": 1.0517, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.19717288872779992, |
|
"grad_norm": 2.3124998904314658, |
|
"learning_rate": 1.8574831364909988e-05, |
|
"loss": 1.1954, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.19789778905400507, |
|
"grad_norm": 2.2466176708447576, |
|
"learning_rate": 1.8562718045683933e-05, |
|
"loss": 1.153, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.19862268938021022, |
|
"grad_norm": 3.186725132878171, |
|
"learning_rate": 1.855055744962502e-05, |
|
"loss": 1.0749, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.19934758970641536, |
|
"grad_norm": 1.9797977750729439, |
|
"learning_rate": 1.8538349643874845e-05, |
|
"loss": 1.1717, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.2000724900326205, |
|
"grad_norm": 2.650096534254559, |
|
"learning_rate": 1.8526094695835647e-05, |
|
"loss": 1.0998, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.20079739035882566, |
|
"grad_norm": 2.242222036665605, |
|
"learning_rate": 1.851379267316995e-05, |
|
"loss": 1.12, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.2015222906850308, |
|
"grad_norm": 3.0768668231596763, |
|
"learning_rate": 1.8501443643800185e-05, |
|
"loss": 1.2119, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.20224719101123595, |
|
"grad_norm": 2.4142131380970655, |
|
"learning_rate": 1.8489047675908328e-05, |
|
"loss": 1.1211, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.2029720913374411, |
|
"grad_norm": 2.494879063057067, |
|
"learning_rate": 1.8476604837935515e-05, |
|
"loss": 1.1135, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.20369699166364624, |
|
"grad_norm": 2.105871660944368, |
|
"learning_rate": 1.846411519858165e-05, |
|
"loss": 1.1952, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.2044218919898514, |
|
"grad_norm": 2.2793551122599847, |
|
"learning_rate": 1.8451578826805046e-05, |
|
"loss": 1.148, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.20514679231605654, |
|
"grad_norm": 2.3515542983789794, |
|
"learning_rate": 1.843899579182204e-05, |
|
"loss": 1.078, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.20587169264226168, |
|
"grad_norm": 2.3215932574696043, |
|
"learning_rate": 1.8426366163106603e-05, |
|
"loss": 0.9794, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.20659659296846683, |
|
"grad_norm": 2.9004041098455122, |
|
"learning_rate": 1.841369001038997e-05, |
|
"loss": 1.1092, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.20732149329467198, |
|
"grad_norm": 2.503251753777213, |
|
"learning_rate": 1.8400967403660228e-05, |
|
"loss": 1.1586, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.20804639362087712, |
|
"grad_norm": 2.6266283769571546, |
|
"learning_rate": 1.8388198413161962e-05, |
|
"loss": 1.1247, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.20877129394708227, |
|
"grad_norm": 2.6053044014908644, |
|
"learning_rate": 1.837538310939586e-05, |
|
"loss": 1.0471, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.20949619427328742, |
|
"grad_norm": 2.2521209365507766, |
|
"learning_rate": 1.8362521563118298e-05, |
|
"loss": 1.0282, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.21022109459949256, |
|
"grad_norm": 2.086975827241926, |
|
"learning_rate": 1.834961384534098e-05, |
|
"loss": 1.1105, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2109459949256977, |
|
"grad_norm": 2.282332985227685, |
|
"learning_rate": 1.8336660027330525e-05, |
|
"loss": 1.1147, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.21167089525190286, |
|
"grad_norm": 2.4541255599254326, |
|
"learning_rate": 1.83236601806081e-05, |
|
"loss": 1.0968, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.212395795578108, |
|
"grad_norm": 2.5576128314214763, |
|
"learning_rate": 1.8310614376948986e-05, |
|
"loss": 1.2477, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.21312069590431315, |
|
"grad_norm": 2.073918446338343, |
|
"learning_rate": 1.829752268838222e-05, |
|
"loss": 1.0705, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.2138455962305183, |
|
"grad_norm": 3.0383016771932168, |
|
"learning_rate": 1.8284385187190168e-05, |
|
"loss": 1.2738, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.21457049655672344, |
|
"grad_norm": 2.5327376795510714, |
|
"learning_rate": 1.8271201945908152e-05, |
|
"loss": 1.0948, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.2152953968829286, |
|
"grad_norm": 2.913601708623835, |
|
"learning_rate": 1.825797303732402e-05, |
|
"loss": 1.1031, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.21602029720913374, |
|
"grad_norm": 2.7092322107658933, |
|
"learning_rate": 1.8244698534477776e-05, |
|
"loss": 1.0646, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.21674519753533888, |
|
"grad_norm": 2.2272013984412418, |
|
"learning_rate": 1.823137851066115e-05, |
|
"loss": 0.9988, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.21747009786154403, |
|
"grad_norm": 1.897477153962133, |
|
"learning_rate": 1.821801303941721e-05, |
|
"loss": 1.086, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.21819499818774918, |
|
"grad_norm": 2.055450943656558, |
|
"learning_rate": 1.8204602194539948e-05, |
|
"loss": 1.1499, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.21891989851395433, |
|
"grad_norm": 2.4909766282023016, |
|
"learning_rate": 1.819114605007388e-05, |
|
"loss": 1.1902, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.21964479884015947, |
|
"grad_norm": 2.155603573794576, |
|
"learning_rate": 1.8177644680313618e-05, |
|
"loss": 1.1291, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.22036969916636462, |
|
"grad_norm": 2.3903217112059845, |
|
"learning_rate": 1.8164098159803485e-05, |
|
"loss": 1.074, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.22109459949256977, |
|
"grad_norm": 2.4252393061914566, |
|
"learning_rate": 1.8150506563337095e-05, |
|
"loss": 1.1277, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.2218194998187749, |
|
"grad_norm": 2.390835505304097, |
|
"learning_rate": 1.813686996595693e-05, |
|
"loss": 1.0942, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.22254440014498006, |
|
"grad_norm": 2.3599664718135087, |
|
"learning_rate": 1.8123188442953935e-05, |
|
"loss": 1.1321, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.2232693004711852, |
|
"grad_norm": 3.4063853490696356, |
|
"learning_rate": 1.81094620698671e-05, |
|
"loss": 1.2013, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.22399420079739035, |
|
"grad_norm": 2.8215352516328682, |
|
"learning_rate": 1.809569092248304e-05, |
|
"loss": 1.1624, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.2247191011235955, |
|
"grad_norm": 2.692738685235975, |
|
"learning_rate": 1.8081875076835587e-05, |
|
"loss": 1.2877, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.22544400144980065, |
|
"grad_norm": 1.9994403713749573, |
|
"learning_rate": 1.8068014609205363e-05, |
|
"loss": 1.1385, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.2261689017760058, |
|
"grad_norm": 2.6062186167459864, |
|
"learning_rate": 1.805410959611935e-05, |
|
"loss": 1.17, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.22689380210221094, |
|
"grad_norm": 2.471082211880037, |
|
"learning_rate": 1.804016011435048e-05, |
|
"loss": 1.1468, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.2276187024284161, |
|
"grad_norm": 2.685017007295627, |
|
"learning_rate": 1.802616624091721e-05, |
|
"loss": 1.1734, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.22834360275462123, |
|
"grad_norm": 2.462530495353453, |
|
"learning_rate": 1.8012128053083097e-05, |
|
"loss": 1.1009, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.22906850308082638, |
|
"grad_norm": 2.2431993395283767, |
|
"learning_rate": 1.7998045628356355e-05, |
|
"loss": 1.1612, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.22979340340703153, |
|
"grad_norm": 2.3899511245334133, |
|
"learning_rate": 1.7983919044489455e-05, |
|
"loss": 1.1758, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.23051830373323667, |
|
"grad_norm": 2.259336195556954, |
|
"learning_rate": 1.7969748379478675e-05, |
|
"loss": 1.1089, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.23124320405944182, |
|
"grad_norm": 3.0578949924857013, |
|
"learning_rate": 1.795553371156368e-05, |
|
"loss": 1.2194, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.23196810438564697, |
|
"grad_norm": 2.4251956933972867, |
|
"learning_rate": 1.7941275119227076e-05, |
|
"loss": 1.1154, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.2326930047118521, |
|
"grad_norm": 2.3987899466375344, |
|
"learning_rate": 1.7926972681193996e-05, |
|
"loss": 1.1155, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.23341790503805726, |
|
"grad_norm": 2.0179495834873395, |
|
"learning_rate": 1.7912626476431648e-05, |
|
"loss": 1.1214, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.2341428053642624, |
|
"grad_norm": 2.2577973639628492, |
|
"learning_rate": 1.78982365841489e-05, |
|
"loss": 1.145, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.23486770569046755, |
|
"grad_norm": 2.5258056683443857, |
|
"learning_rate": 1.788380308379581e-05, |
|
"loss": 1.0736, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.2355926060166727, |
|
"grad_norm": 2.597512148415182, |
|
"learning_rate": 1.7869326055063223e-05, |
|
"loss": 1.0742, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.23631750634287785, |
|
"grad_norm": 2.4111146476669063, |
|
"learning_rate": 1.7854805577882307e-05, |
|
"loss": 1.1139, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.237042406669083, |
|
"grad_norm": 2.513312885502925, |
|
"learning_rate": 1.784024173242412e-05, |
|
"loss": 1.1792, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.23776730699528814, |
|
"grad_norm": 2.6103960099654446, |
|
"learning_rate": 1.7825634599099167e-05, |
|
"loss": 1.0752, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.2384922073214933, |
|
"grad_norm": 2.5153496087422136, |
|
"learning_rate": 1.7810984258556955e-05, |
|
"loss": 1.1024, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.23921710764769843, |
|
"grad_norm": 1.891594673564252, |
|
"learning_rate": 1.779629079168556e-05, |
|
"loss": 0.9794, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.23994200797390358, |
|
"grad_norm": 2.103561148427696, |
|
"learning_rate": 1.7781554279611143e-05, |
|
"loss": 0.9872, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.24066690830010873, |
|
"grad_norm": 2.258815019112651, |
|
"learning_rate": 1.7766774803697555e-05, |
|
"loss": 1.1095, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.24139180862631388, |
|
"grad_norm": 2.0559334388087938, |
|
"learning_rate": 1.7751952445545846e-05, |
|
"loss": 1.1, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.24211670895251902, |
|
"grad_norm": 2.382861239464527, |
|
"learning_rate": 1.7737087286993832e-05, |
|
"loss": 1.118, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.24284160927872417, |
|
"grad_norm": 2.0365687571462536, |
|
"learning_rate": 1.7722179410115644e-05, |
|
"loss": 1.1233, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.24356650960492932, |
|
"grad_norm": 2.5340895730401036, |
|
"learning_rate": 1.770722889722126e-05, |
|
"loss": 1.2206, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.24429140993113446, |
|
"grad_norm": 2.2828107095058865, |
|
"learning_rate": 1.769223583085608e-05, |
|
"loss": 1.144, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.2450163102573396, |
|
"grad_norm": 2.7476805734384495, |
|
"learning_rate": 1.767720029380044e-05, |
|
"loss": 1.0737, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.24574121058354476, |
|
"grad_norm": 2.1237162836325068, |
|
"learning_rate": 1.7662122369069164e-05, |
|
"loss": 1.0684, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.2464661109097499, |
|
"grad_norm": 2.5061834120195488, |
|
"learning_rate": 1.764700213991111e-05, |
|
"loss": 1.1518, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.24719101123595505, |
|
"grad_norm": 2.3826699101861353, |
|
"learning_rate": 1.7631839689808724e-05, |
|
"loss": 1.0676, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.2479159115621602, |
|
"grad_norm": 2.8748822951105235, |
|
"learning_rate": 1.7616635102477545e-05, |
|
"loss": 1.1453, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.24864081188836534, |
|
"grad_norm": 2.2078908610481434, |
|
"learning_rate": 1.760138846186577e-05, |
|
"loss": 0.9728, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.2493657122145705, |
|
"grad_norm": 2.750380120949678, |
|
"learning_rate": 1.7586099852153778e-05, |
|
"loss": 1.194, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.25009061254077564, |
|
"grad_norm": 1.9828736953069481, |
|
"learning_rate": 1.7570769357753682e-05, |
|
"loss": 1.122, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.25009061254077564, |
|
"eval_loss": 1.9538915157318115, |
|
"eval_runtime": 1129.0599, |
|
"eval_samples_per_second": 15.636, |
|
"eval_steps_per_second": 0.489, |
|
"step": 345 |
|
}, |
|
{
"epoch": 0.2508155128669808,
"grad_norm": 2.4573515309403184,
"learning_rate": 1.755539706330883e-05,
"loss": 1.1254,
"step": 346
},
{
"epoch": 0.25154041319318593,
"grad_norm": 2.382471730290679,
"learning_rate": 1.753998305369338e-05,
"loss": 1.0609,
"step": 347
},
{
"epoch": 0.2522653135193911,
"grad_norm": 2.0670497345618624,
"learning_rate": 1.752452741401179e-05,
"loss": 1.11,
"step": 348
},
{
"epoch": 0.2529902138455962,
"grad_norm": 2.826479622570429,
"learning_rate": 1.7509030229598387e-05,
"loss": 1.0688,
"step": 349
},
{
"epoch": 0.25371511417180137,
"grad_norm": 2.4410246210380797,
"learning_rate": 1.749349158601686e-05,
"loss": 1.0848,
"step": 350
},
{
"epoch": 0.2544400144980065,
"grad_norm": 2.1500648572615226,
"learning_rate": 1.7477911569059808e-05,
"loss": 1.1225,
"step": 351
},
{
"epoch": 0.25516491482421166,
"grad_norm": 2.303703660619773,
"learning_rate": 1.746229026474827e-05,
"loss": 1.0781,
"step": 352
},
{
"epoch": 0.2558898151504168,
"grad_norm": 2.6829872427143853,
"learning_rate": 1.744662775933123e-05,
"loss": 1.0742,
"step": 353
},
{
"epoch": 0.25661471547662196,
"grad_norm": 2.2217416215597448,
"learning_rate": 1.743092413928517e-05,
"loss": 1.142,
"step": 354
},
{
"epoch": 0.2573396158028271,
"grad_norm": 2.6300226414802594,
"learning_rate": 1.741517949131356e-05,
"loss": 1.0869,
"step": 355
},
{
"epoch": 0.25806451612903225,
"grad_norm": 2.094472680631636,
"learning_rate": 1.7399393902346403e-05,
"loss": 1.0797,
"step": 356
},
{
"epoch": 0.2587894164552374,
"grad_norm": 2.6696223277650097,
"learning_rate": 1.738356745953975e-05,
"loss": 1.1725,
"step": 357
},
{
"epoch": 0.25951431678144254,
"grad_norm": 2.560913641389733,
"learning_rate": 1.7367700250275196e-05,
"loss": 1.0524,
"step": 358
},
{
"epoch": 0.2602392171076477,
"grad_norm": 2.499190502628081,
"learning_rate": 1.735179236215945e-05,
"loss": 1.0864,
"step": 359
},
{
"epoch": 0.26096411743385284,
"grad_norm": 2.1752493153518127,
"learning_rate": 1.7335843883023792e-05,
"loss": 1.1682,
"step": 360
},
{
"epoch": 0.261689017760058,
"grad_norm": 2.193115771884426,
"learning_rate": 1.7319854900923627e-05,
"loss": 1.1538,
"step": 361
},
{
"epoch": 0.26241391808626313,
"grad_norm": 3.6037027339305228,
"learning_rate": 1.7303825504137985e-05,
"loss": 1.2803,
"step": 362
},
{
"epoch": 0.2631388184124683,
"grad_norm": 2.151895072296486,
"learning_rate": 1.728775578116903e-05,
"loss": 1.027,
"step": 363
},
{
"epoch": 0.2638637187386734,
"grad_norm": 2.0474799790136315,
"learning_rate": 1.7271645820741586e-05,
"loss": 1.0166,
"step": 364
},
{
"epoch": 0.26458861906487857,
"grad_norm": 1.9599613314591962,
"learning_rate": 1.7255495711802627e-05,
"loss": 1.022,
"step": 365
},
{
"epoch": 0.2653135193910837,
"grad_norm": 2.401594401551547,
"learning_rate": 1.723930554352081e-05,
"loss": 1.1917,
"step": 366
},
{
"epoch": 0.26603841971728887,
"grad_norm": 2.642087840213965,
"learning_rate": 1.7223075405285956e-05,
"loss": 1.2054,
"step": 367
},
{
"epoch": 0.266763320043494,
"grad_norm": 2.18604834719014,
"learning_rate": 1.7206805386708572e-05,
"loss": 1.1475,
"step": 368
},
{
"epoch": 0.26748822036969916,
"grad_norm": 2.0338413719031507,
"learning_rate": 1.7190495577619364e-05,
"loss": 1.1028,
"step": 369
},
{
"epoch": 0.2682131206959043,
"grad_norm": 2.1711132034769394,
"learning_rate": 1.717414606806872e-05,
"loss": 1.1044,
"step": 370
},
{
"epoch": 0.26893802102210945,
"grad_norm": 1.930408817282508,
"learning_rate": 1.715775694832623e-05,
"loss": 1.1185,
"step": 371
},
{
"epoch": 0.2696629213483146,
"grad_norm": 2.1748455201638643,
"learning_rate": 1.7141328308880178e-05,
"loss": 1.0437,
"step": 372
},
{
"epoch": 0.27038782167451975,
"grad_norm": 2.487178508999438,
"learning_rate": 1.712486024043705e-05,
"loss": 1.1261,
"step": 373
},
{
"epoch": 0.2711127220007249,
"grad_norm": 2.6620415854141077,
"learning_rate": 1.7108352833921022e-05,
"loss": 1.1125,
"step": 374
},
{
"epoch": 0.27183762232693004,
"grad_norm": 2.4779294920405333,
"learning_rate": 1.7091806180473472e-05,
"loss": 1.1376,
"step": 375
},
{
"epoch": 0.2725625226531352,
"grad_norm": 2.1914137672000433,
"learning_rate": 1.7075220371452465e-05,
"loss": 1.2255,
"step": 376
},
{
"epoch": 0.27328742297934033,
"grad_norm": 2.2406112575154404,
"learning_rate": 1.705859549843226e-05,
"loss": 1.1511,
"step": 377
},
{
"epoch": 0.2740123233055455,
"grad_norm": 2.0333226545718914,
"learning_rate": 1.7041931653202788e-05,
"loss": 1.1184,
"step": 378
},
{
"epoch": 0.2747372236317506,
"grad_norm": 2.8707122599293875,
"learning_rate": 1.702522892776916e-05,
"loss": 1.0367,
"step": 379
},
{
"epoch": 0.2754621239579558,
"grad_norm": 2.242723836798557,
"learning_rate": 1.7008487414351163e-05,
"loss": 1.1647,
"step": 380
},
{
"epoch": 0.2761870242841609,
"grad_norm": 3.052876110953187,
"learning_rate": 1.6991707205382723e-05,
"loss": 1.1574,
"step": 381
},
{
"epoch": 0.27691192461036607,
"grad_norm": 2.3302064287160955,
"learning_rate": 1.6974888393511425e-05,
"loss": 1.0833,
"step": 382
},
{
"epoch": 0.2776368249365712,
"grad_norm": 2.362447133340326,
"learning_rate": 1.695803107159799e-05,
"loss": 1.0142,
"step": 383
},
{
"epoch": 0.27836172526277636,
"grad_norm": 1.9262635584689904,
"learning_rate": 1.694113533271576e-05,
"loss": 0.9974,
"step": 384
},
{
"epoch": 0.2790866255889815,
"grad_norm": 2.6725693558247943,
"learning_rate": 1.6924201270150194e-05,
"loss": 1.1584,
"step": 385
},
{
"epoch": 0.27981152591518665,
"grad_norm": 2.3758190016297855,
"learning_rate": 1.6907228977398324e-05,
"loss": 1.1007,
"step": 386
},
{
"epoch": 0.2805364262413918,
"grad_norm": 2.02794227684942,
"learning_rate": 1.689021854816829e-05,
"loss": 0.9606,
"step": 387
},
{
"epoch": 0.28126132656759695,
"grad_norm": 2.182217806815362,
"learning_rate": 1.6873170076378764e-05,
"loss": 1.1012,
"step": 388
},
{
"epoch": 0.2819862268938021,
"grad_norm": 2.8274640981113714,
"learning_rate": 1.685608365615848e-05,
"loss": 1.1609,
"step": 389
},
{
"epoch": 0.28271112722000724,
"grad_norm": 2.0005395310858165,
"learning_rate": 1.683895938184569e-05,
"loss": 1.1083,
"step": 390
},
{
"epoch": 0.2834360275462124,
"grad_norm": 2.5469245481992195,
"learning_rate": 1.6821797347987637e-05,
"loss": 1.1813,
"step": 391
},
{
"epoch": 0.28416092787241753,
"grad_norm": 2.250275964552977,
"learning_rate": 1.680459764934006e-05,
"loss": 1.0717,
"step": 392
},
{
"epoch": 0.2848858281986227,
"grad_norm": 2.1541186966473314,
"learning_rate": 1.678736038086664e-05,
"loss": 1.0937,
"step": 393
},
{
"epoch": 0.28561072852482783,
"grad_norm": 2.2526083463595694,
"learning_rate": 1.67700856377385e-05,
"loss": 1.1692,
"step": 394
},
{
"epoch": 0.286335628851033,
"grad_norm": 2.360291496431212,
"learning_rate": 1.6752773515333665e-05,
"loss": 1.0956,
"step": 395
},
{
"epoch": 0.2870605291772381,
"grad_norm": 2.7484396524462693,
"learning_rate": 1.6735424109236538e-05,
"loss": 1.0348,
"step": 396
},
{
"epoch": 0.28778542950344327,
"grad_norm": 1.991944001354783,
"learning_rate": 1.671803751523738e-05,
"loss": 1.0371,
"step": 397
},
{
"epoch": 0.2885103298296484,
"grad_norm": 2.1121316874137794,
"learning_rate": 1.6700613829331778e-05,
"loss": 1.0483,
"step": 398
},
{
"epoch": 0.28923523015585356,
"grad_norm": 2.9850153218884405,
"learning_rate": 1.6683153147720098e-05,
"loss": 1.2002,
"step": 399
},
{
"epoch": 0.2899601304820587,
"grad_norm": 2.2268590912100987,
"learning_rate": 1.666565556680698e-05,
"loss": 1.1616,
"step": 400
},
{
"epoch": 0.29068503080826386,
"grad_norm": 2.135948670713283,
"learning_rate": 1.664812118320079e-05,
"loss": 0.9466,
"step": 401
},
{
"epoch": 0.291409931134469,
"grad_norm": 2.8208944014300603,
"learning_rate": 1.6630550093713096e-05,
"loss": 1.1814,
"step": 402
},
{
"epoch": 0.29213483146067415,
"grad_norm": 2.225373597938535,
"learning_rate": 1.661294239535812e-05,
"loss": 1.0444,
"step": 403
},
{
"epoch": 0.2928597317868793,
"grad_norm": 2.253251092388969,
"learning_rate": 1.6595298185352216e-05,
"loss": 1.092,
"step": 404
},
{
"epoch": 0.29358463211308444,
"grad_norm": 2.46062887701134,
"learning_rate": 1.6577617561113322e-05,
"loss": 1.0818,
"step": 405
},
{
"epoch": 0.2943095324392896,
"grad_norm": 3.129489147993457,
"learning_rate": 1.6559900620260435e-05,
"loss": 1.1676,
"step": 406
},
{
"epoch": 0.29503443276549474,
"grad_norm": 2.063039937217649,
"learning_rate": 1.654214746061306e-05,
"loss": 1.1648,
"step": 407
},
{
"epoch": 0.2957593330916999,
"grad_norm": 2.4465593098747256,
"learning_rate": 1.652435818019068e-05,
"loss": 1.0201,
"step": 408
},
{
"epoch": 0.29648423341790503,
"grad_norm": 2.399324464811924,
"learning_rate": 1.6506532877212195e-05,
"loss": 1.0413,
"step": 409
},
{
"epoch": 0.2972091337441102,
"grad_norm": 2.6482971890623546,
"learning_rate": 1.6488671650095416e-05,
"loss": 1.105,
"step": 410
},
{
"epoch": 0.2979340340703153,
"grad_norm": 3.034059815258785,
"learning_rate": 1.6470774597456482e-05,
"loss": 1.066,
"step": 411
},
{
"epoch": 0.29865893439652047,
"grad_norm": 2.0229310257745725,
"learning_rate": 1.6452841818109342e-05,
"loss": 1.0126,
"step": 412
},
{
"epoch": 0.2993838347227256,
"grad_norm": 2.09062623797439,
"learning_rate": 1.64348734110652e-05,
"loss": 1.0359,
"step": 413
},
{
"epoch": 0.30010873504893076,
"grad_norm": 2.261696086833709,
"learning_rate": 1.6416869475531964e-05,
"loss": 1.0387,
"step": 414
},
{
"epoch": 0.3008336353751359,
"grad_norm": 2.0194176082613686,
"learning_rate": 1.639883011091371e-05,
"loss": 1.0375,
"step": 415
},
{
"epoch": 0.30155853570134106,
"grad_norm": 3.0655992063503392,
"learning_rate": 1.6380755416810125e-05,
"loss": 1.1433,
"step": 416
},
{
"epoch": 0.3022834360275462,
"grad_norm": 2.200388293782514,
"learning_rate": 1.6362645493015955e-05,
"loss": 1.0384,
"step": 417
},
{
"epoch": 0.30300833635375135,
"grad_norm": 2.2340973013167646,
"learning_rate": 1.634450043952046e-05,
"loss": 1.1233,
"step": 418
},
{
"epoch": 0.3037332366799565,
"grad_norm": 2.4431864602307334,
"learning_rate": 1.6326320356506858e-05,
"loss": 1.1127,
"step": 419
},
{
"epoch": 0.30445813700616164,
"grad_norm": 2.5340829203747997,
"learning_rate": 1.6308105344351776e-05,
"loss": 1.1336,
"step": 420
},
{
"epoch": 0.3051830373323668,
"grad_norm": 2.4621749417333865,
"learning_rate": 1.6289855503624694e-05,
"loss": 1.18,
"step": 421
},
{
"epoch": 0.30590793765857194,
"grad_norm": 2.435171083757431,
"learning_rate": 1.6271570935087385e-05,
"loss": 1.1034,
"step": 422
},
{
"epoch": 0.3066328379847771,
"grad_norm": 2.56776282560331,
"learning_rate": 1.6253251739693366e-05,
"loss": 1.1227,
"step": 423
},
{
"epoch": 0.30735773831098223,
"grad_norm": 2.6606550385377172,
"learning_rate": 1.6234898018587336e-05,
"loss": 1.0606,
"step": 424
},
{
"epoch": 0.3080826386371874,
"grad_norm": 2.3905958945143584,
"learning_rate": 1.621650987310462e-05,
"loss": 1.0924,
"step": 425
},
{
"epoch": 0.3088075389633925,
"grad_norm": 2.16146889842216,
"learning_rate": 1.6198087404770602e-05,
"loss": 1.0924,
"step": 426
},
{
"epoch": 0.30953243928959767,
"grad_norm": 2.511397106934819,
"learning_rate": 1.617963071530018e-05,
"loss": 1.1366,
"step": 427
},
{
"epoch": 0.3102573396158028,
"grad_norm": 2.394538554344066,
"learning_rate": 1.616113990659719e-05,
"loss": 1.0009,
"step": 428
},
{
"epoch": 0.31098223994200797,
"grad_norm": 2.4259004458514006,
"learning_rate": 1.6142615080753846e-05,
"loss": 1.1315,
"step": 429
},
{
"epoch": 0.3117071402682131,
"grad_norm": 2.6037915854967726,
"learning_rate": 1.6124056340050186e-05,
"loss": 1.0235,
"step": 430
},
{
"epoch": 0.31243204059441826,
"grad_norm": 2.260088913738152,
"learning_rate": 1.6105463786953502e-05,
"loss": 1.016,
"step": 431
},
{
"epoch": 0.3131569409206234,
"grad_norm": 1.9542058053758606,
"learning_rate": 1.6086837524117755e-05,
"loss": 1.0546,
"step": 432
},
{
"epoch": 0.31388184124682855,
"grad_norm": 2.279040211460153,
"learning_rate": 1.6068177654383042e-05,
"loss": 1.1363,
"step": 433
},
{
"epoch": 0.3146067415730337,
"grad_norm": 2.4083685524014933,
"learning_rate": 1.6049484280775012e-05,
"loss": 1.0617,
"step": 434
},
{
"epoch": 0.31533164189923885,
"grad_norm": 2.3755540476745844,
"learning_rate": 1.6030757506504284e-05,
"loss": 1.0368,
"step": 435
},
{
"epoch": 0.316056542225444,
"grad_norm": 2.1752306039124565,
"learning_rate": 1.6011997434965906e-05,
"loss": 1.0696,
"step": 436
},
{
"epoch": 0.31678144255164914,
"grad_norm": 2.0792526403603557,
"learning_rate": 1.5993204169738757e-05,
"loss": 1.0583,
"step": 437
},
{
"epoch": 0.3175063428778543,
"grad_norm": 2.1158935057351838,
"learning_rate": 1.5974377814584986e-05,
"loss": 1.1241,
"step": 438
},
{
"epoch": 0.31823124320405943,
"grad_norm": 1.7878941702995481,
"learning_rate": 1.5955518473449448e-05,
"loss": 1.1129,
"step": 439
},
{
"epoch": 0.3189561435302646,
"grad_norm": 2.1238392656978404,
"learning_rate": 1.593662625045912e-05,
"loss": 1.0596,
"step": 440
},
{
"epoch": 0.3196810438564697,
"grad_norm": 2.1813159053603104,
"learning_rate": 1.591770124992252e-05,
"loss": 1.1104,
"step": 441
},
{
"epoch": 0.3204059441826749,
"grad_norm": 1.8368381622027763,
"learning_rate": 1.589874357632914e-05,
"loss": 1.0579,
"step": 442
},
{
"epoch": 0.32113084450888,
"grad_norm": 2.233702212234041,
"learning_rate": 1.587975333434888e-05,
"loss": 1.1181,
"step": 443
},
{
"epoch": 0.32185574483508517,
"grad_norm": 2.112351085565823,
"learning_rate": 1.586073062883144e-05,
"loss": 1.028,
"step": 444
},
{
"epoch": 0.3225806451612903,
"grad_norm": 3.4664697517256595,
"learning_rate": 1.5841675564805773e-05,
"loss": 1.0466,
"step": 445
},
{
"epoch": 0.32330554548749546,
"grad_norm": 1.9010894055087597,
"learning_rate": 1.582258824747949e-05,
"loss": 1.0831,
"step": 446
},
{
"epoch": 0.3240304458137006,
"grad_norm": 2.9306779332036714,
"learning_rate": 1.580346878223827e-05,
"loss": 1.2466,
"step": 447
},
{
"epoch": 0.32475534613990575,
"grad_norm": 1.9192009119584101,
"learning_rate": 1.5784317274645294e-05,
"loss": 1.1088,
"step": 448
},
{
"epoch": 0.3254802464661109,
"grad_norm": 2.242207905403273,
"learning_rate": 1.576513383044066e-05,
"loss": 1.1625,
"step": 449
},
{
"epoch": 0.32620514679231605,
"grad_norm": 2.224398160967933,
"learning_rate": 1.5745918555540796e-05,
"loss": 1.1488,
"step": 450
},
{
"epoch": 0.3269300471185212,
"grad_norm": 2.0861425073253503,
"learning_rate": 1.572667155603787e-05,
"loss": 0.9955,
"step": 451
},
{
"epoch": 0.32765494744472634,
"grad_norm": 1.92493109968563,
"learning_rate": 1.5707392938199208e-05,
"loss": 0.9444,
"step": 452
},
{
"epoch": 0.3283798477709315,
"grad_norm": 1.9283323786352506,
"learning_rate": 1.5688082808466714e-05,
"loss": 0.9664,
"step": 453
},
{
"epoch": 0.32910474809713663,
"grad_norm": 2.5055746794884093,
"learning_rate": 1.566874127345628e-05,
"loss": 1.0542,
"step": 454
},
{
"epoch": 0.3298296484233418,
"grad_norm": 2.4180849774657784,
"learning_rate": 1.5649368439957182e-05,
"loss": 1.1004,
"step": 455
},
{
"epoch": 0.33055454874954693,
"grad_norm": 3.1969300216088574,
"learning_rate": 1.562996441493151e-05,
"loss": 1.1926,
"step": 456
},
{
"epoch": 0.3312794490757521,
"grad_norm": 1.8450416397567433,
"learning_rate": 1.561052930551357e-05,
"loss": 0.9204,
"step": 457
},
{
"epoch": 0.3320043494019572,
"grad_norm": 2.1352988507915076,
"learning_rate": 1.5591063219009296e-05,
"loss": 1.1082,
"step": 458
},
{
"epoch": 0.33272924972816237,
"grad_norm": 2.0068487127258563,
"learning_rate": 1.5571566262895637e-05,
"loss": 1.0769,
"step": 459
},
{
"epoch": 0.3334541500543675,
"grad_norm": 2.158036051650137,
"learning_rate": 1.5552038544820008e-05,
"loss": 1.0965,
"step": 460
},
{
"epoch": 0.33417905038057266,
"grad_norm": 2.260378271679646,
"learning_rate": 1.5532480172599646e-05,
"loss": 1.08,
"step": 461
},
{
"epoch": 0.3349039507067778,
"grad_norm": 2.0768476079893317,
"learning_rate": 1.5512891254221046e-05,
"loss": 1.0548,
"step": 462
},
{
"epoch": 0.33562885103298296,
"grad_norm": 2.227359781654928,
"learning_rate": 1.549327189783935e-05,
"loss": 1.0364,
"step": 463
},
{
"epoch": 0.3363537513591881,
"grad_norm": 2.008303234643065,
"learning_rate": 1.5473622211777763e-05,
"loss": 1.0596,
"step": 464
},
{
"epoch": 0.33707865168539325,
"grad_norm": 2.1053343622780525,
"learning_rate": 1.5453942304526945e-05,
"loss": 1.0106,
"step": 465
},
{
"epoch": 0.3378035520115984,
"grad_norm": 2.2528412924953645,
"learning_rate": 1.543423228474441e-05,
"loss": 1.1579,
"step": 466
},
{
"epoch": 0.33852845233780354,
"grad_norm": 2.098527027839517,
"learning_rate": 1.5414492261253937e-05,
"loss": 1.0913,
"step": 467
},
{
"epoch": 0.3392533526640087,
"grad_norm": 2.3910084145103383,
"learning_rate": 1.539472234304496e-05,
"loss": 1.0481,
"step": 468
},
{
"epoch": 0.33997825299021384,
"grad_norm": 2.0843546569961937,
"learning_rate": 1.537492263927196e-05,
"loss": 1.1351,
"step": 469
},
{
"epoch": 0.340703153316419,
"grad_norm": 2.0698901517851294,
"learning_rate": 1.5355093259253897e-05,
"loss": 1.1095,
"step": 470
},
{
"epoch": 0.34142805364262413,
"grad_norm": 2.2916177664815867,
"learning_rate": 1.533523431247355e-05,
"loss": 1.094,
"step": 471
},
{
"epoch": 0.3421529539688293,
"grad_norm": 2.000471695373447,
"learning_rate": 1.531534590857696e-05,
"loss": 1.0647,
"step": 472
},
{
"epoch": 0.3428778542950344,
"grad_norm": 2.048745212856722,
"learning_rate": 1.529542815737281e-05,
"loss": 1.0448,
"step": 473
},
{
"epoch": 0.34360275462123957,
"grad_norm": 2.104486287385562,
"learning_rate": 1.52754811688318e-05,
"loss": 1.0525,
"step": 474
},
{
"epoch": 0.3443276549474447,
"grad_norm": 2.172054502687611,
"learning_rate": 1.525550505308608e-05,
"loss": 1.0437,
"step": 475
},
{
"epoch": 0.34505255527364986,
"grad_norm": 2.1612251883461355,
"learning_rate": 1.52354999204286e-05,
"loss": 1.1142,
"step": 476
},
{
"epoch": 0.345777455599855,
"grad_norm": 2.5598958893453987,
"learning_rate": 1.5215465881312526e-05,
"loss": 1.0597,
"step": 477
},
{
"epoch": 0.34650235592606016,
"grad_norm": 2.1402492854032875,
"learning_rate": 1.519540304635062e-05,
"loss": 1.0678,
"step": 478
},
{
"epoch": 0.3472272562522653,
"grad_norm": 2.386151874903598,
"learning_rate": 1.5175311526314642e-05,
"loss": 1.1297,
"step": 479
},
{
"epoch": 0.34795215657847045,
"grad_norm": 2.8147411958053734,
"learning_rate": 1.5155191432134721e-05,
"loss": 1.047,
"step": 480
},
{
"epoch": 0.3486770569046756,
"grad_norm": 2.185563612843593,
"learning_rate": 1.513504287489875e-05,
"loss": 1.0674,
"step": 481
},
{
"epoch": 0.34940195723088074,
"grad_norm": 2.1264088878183705,
"learning_rate": 1.5114865965851779e-05,
"loss": 1.1097,
"step": 482
},
{
"epoch": 0.3501268575570859,
"grad_norm": 2.294219589338203,
"learning_rate": 1.509466081639539e-05,
"loss": 0.9861,
"step": 483
},
{
"epoch": 0.35085175788329104,
"grad_norm": 2.2845015506312474,
"learning_rate": 1.507442753808709e-05,
"loss": 1.0646,
"step": 484
},
{
"epoch": 0.3515766582094962,
"grad_norm": 2.1560916077937606,
"learning_rate": 1.5054166242639694e-05,
"loss": 1.0056,
"step": 485
},
{
"epoch": 0.35230155853570133,
"grad_norm": 2.0570915443776654,
"learning_rate": 1.5033877041920703e-05,
"loss": 1.0926,
"step": 486
},
{
"epoch": 0.3530264588619065,
"grad_norm": 1.9130291874506817,
"learning_rate": 1.5013560047951689e-05,
"loss": 0.9783,
"step": 487
},
{
"epoch": 0.3537513591881116,
"grad_norm": 2.3476451826235625,
"learning_rate": 1.499321537290768e-05,
"loss": 1.0546,
"step": 488
},
{
"epoch": 0.35447625951431677,
"grad_norm": 2.3654378069877966,
"learning_rate": 1.4972843129116537e-05,
"loss": 1.0442,
"step": 489
},
{
"epoch": 0.3552011598405219,
"grad_norm": 1.9825769936244912,
"learning_rate": 1.4952443429058334e-05,
"loss": 1.0081,
"step": 490
},
{
"epoch": 0.35592606016672707,
"grad_norm": 2.2643797127980747,
"learning_rate": 1.493201638536474e-05,
"loss": 1.0317,
"step": 491
},
{
"epoch": 0.3566509604929322,
"grad_norm": 2.4516169299014465,
"learning_rate": 1.4911562110818392e-05,
"loss": 0.9924,
"step": 492
},
{
"epoch": 0.35737586081913736,
"grad_norm": 2.446561602895059,
"learning_rate": 1.4891080718352275e-05,
"loss": 1.0407,
"step": 493
},
{
"epoch": 0.3581007611453425,
"grad_norm": 2.0486524427646144,
"learning_rate": 1.4870572321049102e-05,
"loss": 1.0797,
"step": 494
},
{
"epoch": 0.35882566147154765,
"grad_norm": 2.485346536887422,
"learning_rate": 1.4850037032140683e-05,
"loss": 0.9983,
"step": 495
},
{
"epoch": 0.3595505617977528,
"grad_norm": 3.0439426290182094,
"learning_rate": 1.4829474965007303e-05,
"loss": 1.152,
"step": 496
},
{
"epoch": 0.36027546212395795,
"grad_norm": 2.540523235848105,
"learning_rate": 1.4808886233177096e-05,
"loss": 1.2146,
"step": 497
},
{
"epoch": 0.3610003624501631,
"grad_norm": 2.5194385485382207,
"learning_rate": 1.4788270950325424e-05,
"loss": 1.0989,
"step": 498
},
{
"epoch": 0.36172526277636824,
"grad_norm": 2.3200096809507693,
"learning_rate": 1.4767629230274236e-05,
"loss": 1.2226,
"step": 499
},
{
"epoch": 0.3624501631025734,
"grad_norm": 2.0411294097073363,
"learning_rate": 1.474696118699145e-05,
"loss": 1.0421,
"step": 500
},
{
"epoch": 0.36317506342877853,
"grad_norm": 2.3819141883855597,
"learning_rate": 1.4726266934590321e-05,
"loss": 1.0162,
"step": 501
},
{
"epoch": 0.3638999637549837,
"grad_norm": 1.9177330075859071,
"learning_rate": 1.4705546587328811e-05,
"loss": 1.0171,
"step": 502
},
{
"epoch": 0.3646248640811888,
"grad_norm": 2.06883784081889,
"learning_rate": 1.4684800259608958e-05,
"loss": 1.0567,
"step": 503
},
{
"epoch": 0.365349764407394,
"grad_norm": 2.796404840199928,
"learning_rate": 1.4664028065976245e-05,
"loss": 1.0567,
"step": 504
},
{
"epoch": 0.3660746647335991,
"grad_norm": 2.0344928788193433,
"learning_rate": 1.4643230121118965e-05,
"loss": 1.1192,
"step": 505
},
{
"epoch": 0.36679956505980427,
"grad_norm": 1.931575463278115,
"learning_rate": 1.4622406539867593e-05,
"loss": 1.0491,
"step": 506
},
{
"epoch": 0.3675244653860094,
"grad_norm": 2.5445474863300106,
"learning_rate": 1.460155743719414e-05,
"loss": 1.0213,
"step": 507
},
{
"epoch": 0.36824936571221456,
"grad_norm": 2.0670836939977564,
"learning_rate": 1.4580682928211545e-05,
"loss": 0.92,
"step": 508
},
{
"epoch": 0.3689742660384197,
"grad_norm": 2.575085198850707,
"learning_rate": 1.4559783128173e-05,
"loss": 1.0829,
"step": 509
},
{
"epoch": 0.36969916636462485,
"grad_norm": 2.7113350290504723,
"learning_rate": 1.4538858152471349e-05,
"loss": 1.1535,
"step": 510
},
{
"epoch": 0.37042406669083,
"grad_norm": 2.312117149467026,
"learning_rate": 1.4517908116638433e-05,
"loss": 1.0897,
"step": 511
},
{
"epoch": 0.37114896701703515,
"grad_norm": 2.3702995154115656,
"learning_rate": 1.4496933136344453e-05,
"loss": 1.0635,
"step": 512
},
{
"epoch": 0.3718738673432403,
"grad_norm": 2.4501757498607537,
"learning_rate": 1.4475933327397342e-05,
"loss": 1.0603,
"step": 513
},
{
"epoch": 0.37259876766944544,
"grad_norm": 2.023783253482957,
"learning_rate": 1.445490880574211e-05,
"loss": 1.0789,
"step": 514
},
{
"epoch": 0.3733236679956506,
"grad_norm": 3.2864848049242648,
"learning_rate": 1.4433859687460216e-05,
"loss": 1.0952,
"step": 515
},
{
"epoch": 0.37404856832185573,
"grad_norm": 2.4191595705564737,
"learning_rate": 1.4412786088768923e-05,
"loss": 1.1147,
"step": 516
},
{
"epoch": 0.3747734686480609,
"grad_norm": 2.4447277431874825,
"learning_rate": 1.439168812602065e-05,
"loss": 1.0393,
"step": 517
},
{
"epoch": 0.37549836897426603,
"grad_norm": 1.9206969249262023,
"learning_rate": 1.437056591570235e-05,
"loss": 1.0022,
"step": 518
},
{
"epoch": 0.3762232693004712,
"grad_norm": 2.359427663223057,
"learning_rate": 1.4349419574434838e-05,
"loss": 1.093,
"step": 519
},
{
"epoch": 0.3769481696266763,
"grad_norm": 2.3010718359012103,
"learning_rate": 1.4328249218972168e-05,
"loss": 1.0408,
"step": 520
},
{
"epoch": 0.37767306995288147,
"grad_norm": 2.2594128382319596,
"learning_rate": 1.4307054966200984e-05,
"loss": 0.9412,
"step": 521
},
{
"epoch": 0.3783979702790866,
"grad_norm": 2.1054269973126747,
"learning_rate": 1.4285836933139865e-05,
"loss": 1.0254,
"step": 522
},
{
"epoch": 0.37912287060529176,
"grad_norm": 2.168937643340505,
"learning_rate": 1.42645952369387e-05,
"loss": 1.0537,
"step": 523
},
{
"epoch": 0.3798477709314969,
"grad_norm": 2.061073976619602,
"learning_rate": 1.4243329994878017e-05,
"loss": 1.1168,
"step": 524
},
{
"epoch": 0.38057267125770206,
"grad_norm": 2.299186158706926,
"learning_rate": 1.4222041324368347e-05,
"loss": 1.1138,
"step": 525
},
{
"epoch": 0.3812975715839072,
"grad_norm": 2.251869960906443,
"learning_rate": 1.4200729342949577e-05,
"loss": 0.9248,
"step": 526
},
{
"epoch": 0.38202247191011235,
"grad_norm": 2.2506167583969856,
"learning_rate": 1.4179394168290309e-05,
"loss": 1.0757,
"step": 527
},
{
"epoch": 0.3827473722363175,
"grad_norm": 2.916531852779516,
"learning_rate": 1.4158035918187182e-05,
"loss": 1.1818,
"step": 528
},
{
"epoch": 0.38347227256252264,
"grad_norm": 2.3787214433218806,
"learning_rate": 1.4136654710564251e-05,
"loss": 1.0336,
"step": 529
},
{
"epoch": 0.3841971728887278,
"grad_norm": 2.4765072845885068,
"learning_rate": 1.4115250663472326e-05,
"loss": 1.0506,
"step": 530
},
{
"epoch": 0.38492207321493294,
"grad_norm": 2.407842302823844,
"learning_rate": 1.4093823895088315e-05,
"loss": 1.1,
"step": 531
},
{
"epoch": 0.3856469735411381,
"grad_norm": 2.3150861515619487,
"learning_rate": 1.4072374523714577e-05,
"loss": 1.0437,
"step": 532
},
{
"epoch": 0.38637187386734323,
"grad_norm": 2.0825864756582506,
"learning_rate": 1.4050902667778272e-05,
"loss": 1.1277,
"step": 533
},
{
"epoch": 0.3870967741935484,
"grad_norm": 2.243168272872563,
"learning_rate": 1.402940844583069e-05,
"loss": 1.0506,
"step": 534
},
{
"epoch": 0.3878216745197535,
"grad_norm": 2.185661024343008,
"learning_rate": 1.4007891976546627e-05,
"loss": 1.088,
"step": 535
},
{
"epoch": 0.38854657484595867,
"grad_norm": 2.2131084797482186,
"learning_rate": 1.3986353378723696e-05,
"loss": 1.0654,
"step": 536
},
{
"epoch": 0.3892714751721638,
"grad_norm": 2.4512297593144177,
"learning_rate": 1.3964792771281702e-05,
"loss": 1.2112,
"step": 537
},
{
"epoch": 0.38999637549836896,
"grad_norm": 1.9649805513289704,
"learning_rate": 1.394321027326195e-05,
"loss": 1.0487,
"step": 538
},
{
"epoch": 0.3907212758245741,
"grad_norm": 2.0806905976421985,
"learning_rate": 1.392160600382663e-05,
"loss": 1.1033,
"step": 539
},
{
"epoch": 0.39144617615077926,
"grad_norm": 2.117182296397933,
"learning_rate": 1.3899980082258122e-05,
"loss": 1.0957,
"step": 540
},
{
"epoch": 0.3921710764769844,
"grad_norm": 2.4101205011369777,
"learning_rate": 1.3878332627958365e-05,
"loss": 1.1324,
"step": 541
},
{
"epoch": 0.39289597680318955,
"grad_norm": 2.0554352528605255,
"learning_rate": 1.385666376044817e-05,
"loss": 1.0756,
"step": 542
},
{
"epoch": 0.3936208771293947,
"grad_norm": 2.8127997073836806,
"learning_rate": 1.3834973599366588e-05,
"loss": 1.203,
"step": 543
},
{
"epoch": 0.39434577745559984,
"grad_norm": 2.249229842195169,
"learning_rate": 1.3813262264470235e-05,
"loss": 1.0307,
"step": 544
},
{
"epoch": 0.395070677781805,
"grad_norm": 2.371447175626824,
"learning_rate": 1.3791529875632628e-05,
"loss": 1.1348,
"step": 545
},
{
"epoch": 0.39579557810801014,
"grad_norm": 2.2035574911903697,
"learning_rate": 1.3769776552843532e-05,
"loss": 1.1021,
"step": 546
},
{
"epoch": 0.3965204784342153,
"grad_norm": 2.749098638445283,
"learning_rate": 1.3748002416208292e-05,
"loss": 1.1599,
"step": 547
},
{
"epoch": 0.39724537876042043,
"grad_norm": 1.9103185758921004,
"learning_rate": 1.372620758594717e-05,
"loss": 0.9921,
"step": 548
},
{
"epoch": 0.3979702790866256,
"grad_norm": 2.1532622643710946,
"learning_rate": 1.3704392182394686e-05,
"loss": 1.0174,
"step": 549
},
{
"epoch": 0.3986951794128307,
"grad_norm": 2.0770483467925875,
"learning_rate": 1.3682556325998947e-05,
"loss": 1.1642,
"step": 550
},
{
"epoch": 0.39942007973903587,
"grad_norm": 2.0510030802176002,
"learning_rate": 1.3660700137320986e-05,
"loss": 1.0736,
"step": 551
},
{
"epoch": 0.400144980065241,
"grad_norm": 2.4143942712642583,
"learning_rate": 1.3638823737034095e-05,
"loss": 1.0679,
"step": 552
},
{
"epoch": 0.40086988039144617,
"grad_norm": 2.087827823927243,
"learning_rate": 1.3616927245923157e-05,
"loss": 1.077,
"step": 553
},
{
"epoch": 0.4015947807176513,
"grad_norm": 2.155634016832207,
"learning_rate": 1.359501078488399e-05,
"loss": 0.9344,
"step": 554
},
{
"epoch": 0.40231968104385646,
"grad_norm": 2.9410176032472104,
"learning_rate": 1.357307447492266e-05,
"loss": 0.9876,
"step": 555
},
{
"epoch": 0.4030445813700616,
"grad_norm": 2.3785327314059965,
"learning_rate": 1.3551118437154833e-05,
"loss": 0.8992,
"step": 556
},
{
"epoch": 0.40376948169626675,
"grad_norm": 2.206200464844771,
"learning_rate": 1.3529142792805087e-05,
"loss": 1.0472,
"step": 557
},
{
"epoch": 0.4044943820224719,
"grad_norm": 2.4336430655807058,
"learning_rate": 1.3507147663206257e-05,
"loss": 1.147,
"step": 558
},
{
"epoch": 0.40521928234867705,
"grad_norm": 2.0423284010474627,
"learning_rate": 1.348513316979877e-05,
"loss": 1.0467,
"step": 559
},
{
"epoch": 0.4059441826748822,
"grad_norm": 2.6261549064320215,
"learning_rate": 1.346309943412995e-05,
"loss": 0.954,
"step": 560
},
{
"epoch": 0.40666908300108734,
"grad_norm": 2.8231886882036386,
"learning_rate": 1.3441046577853371e-05,
"loss": 1.088,
"step": 561
},
{
"epoch": 0.4073939833272925,
"grad_norm": 2.5545065191928766,
"learning_rate": 1.3418974722728177e-05,
"loss": 1.1541,
"step": 562
},
{
"epoch": 0.40811888365349763,
"grad_norm": 2.714166874930808,
"learning_rate": 1.3396883990618404e-05,
"loss": 1.1053,
"step": 563
},
{
"epoch": 0.4088437839797028,
"grad_norm": 1.9784578326236464,
"learning_rate": 1.3374774503492316e-05,
"loss": 1.0065,
"step": 564
},
{
"epoch": 0.4095686843059079,
"grad_norm": 2.1486595608204357,
"learning_rate": 1.3352646383421728e-05,
"loss": 1.0184,
"step": 565
},
{
"epoch": 0.4102935846321131,
"grad_norm": 2.5242559447298483,
"learning_rate": 1.3330499752581331e-05,
"loss": 1.0812,
"step": 566
},
{
"epoch": 0.4110184849583182,
"grad_norm": 1.9089213008734895,
"learning_rate": 1.3308334733248019e-05,
"loss": 0.9932,
"step": 567
},
{
"epoch": 0.41174338528452337,
"grad_norm": 1.890310480638506,
"learning_rate": 1.3286151447800211e-05,
"loss": 0.9832,
"step": 568
},
{
"epoch": 0.4124682856107285,
"grad_norm": 2.038892051314124,
"learning_rate": 1.3263950018717184e-05,
"loss": 0.9662,
"step": 569
},
{
"epoch": 0.41319318593693366,
"grad_norm": 2.241417479572917,
"learning_rate": 1.3241730568578383e-05,
"loss": 1.0493,
"step": 570
},
{
"epoch": 0.4139180862631388,
"grad_norm": 2.260815460922618,
"learning_rate": 1.3219493220062756e-05,
"loss": 1.0101,
"step": 571
},
{
"epoch": 0.41464298658934395,
"grad_norm": 2.456266733687105,
"learning_rate": 1.3197238095948066e-05,
"loss": 1.0541,
"step": 572
},
{
"epoch": 0.4153678869155491,
"grad_norm": 2.5507405445700377,
"learning_rate": 1.3174965319110231e-05,
"loss": 1.0279,
"step": 573
},
{
"epoch": 0.41609278724175425,
"grad_norm": 2.847963262037451,
"learning_rate": 1.3152675012522629e-05,
"loss": 1.108,
"step": 574
},
{
"epoch": 0.4168176875679594,
"grad_norm": 2.1767458920508784,
"learning_rate": 1.313036729925541e-05,
"loss": 1.0479,
"step": 575
},
{
"epoch": 0.41754258789416454,
"grad_norm": 3.6818743459800234,
"learning_rate": 1.3108042302474858e-05,
"loss": 1.1633,
"step": 576
},
{
"epoch": 0.4182674882203697,
"grad_norm": 2.4735330534741595,
"learning_rate": 1.3085700145442653e-05,
"loss": 0.9817,
"step": 577
},
{
"epoch": 0.41899238854657483,
"grad_norm": 2.099259381744443,
"learning_rate": 1.3063340951515246e-05,
"loss": 0.9347,
"step": 578
},
{
"epoch": 0.41971728887278,
"grad_norm": 2.3256524631392916,
"learning_rate": 1.3040964844143144e-05,
"loss": 1.0972,
"step": 579
},
{
"epoch": 0.42044218919898513,
"grad_norm": 2.5886329967045385,
"learning_rate": 1.301857194687023e-05,
"loss": 1.1258,
"step": 580
},
{
"epoch": 0.4211670895251903,
"grad_norm": 2.0842353427178097,
"learning_rate": 1.2996162383333097e-05,
"loss": 1.0067,
"step": 581
},
{
"epoch": 0.4218919898513954,
"grad_norm": 2.0384216517802662,
"learning_rate": 1.2973736277260351e-05,
"loss": 1.0734,
"step": 582
},
{
"epoch": 0.42261689017760057,
"grad_norm": 2.4453213613902784,
"learning_rate": 1.295129375247194e-05,
"loss": 1.1793,
"step": 583
},
{
"epoch": 0.4233417905038057,
"grad_norm": 1.9025172070678824,
"learning_rate": 1.292883493287846e-05,
"loss": 1.083,
"step": 584
},
{
"epoch": 0.42406669083001086,
"grad_norm": 2.6363997339281595,
"learning_rate": 1.290635994248047e-05,
"loss": 1.0293,
"step": 585
},
{
"epoch": 0.424791591156216,
"grad_norm": 2.311748807183176,
"learning_rate": 1.2883868905367818e-05,
"loss": 1.1425,
"step": 586
},
{
"epoch": 0.42551649148242116,
"grad_norm": 2.357170974552937,
"learning_rate": 1.2861361945718947e-05,
"loss": 1.2254,
"step": 587
},
{
"epoch": 0.4262413918086263,
"grad_norm": 2.176179466256725,
"learning_rate": 1.2838839187800218e-05,
"loss": 1.0198,
"step": 588
},
{
"epoch": 0.42696629213483145,
"grad_norm": 2.9614059903538967,
"learning_rate": 1.2816300755965202e-05,
"loss": 1.1044,
"step": 589
},
{
"epoch": 0.4276911924610366,
"grad_norm": 2.089967789540345,
"learning_rate": 1.2793746774654034e-05,
"loss": 0.9743,
"step": 590
},
{
"epoch": 0.42841609278724174,
"grad_norm": 2.0512215889422114,
"learning_rate": 1.277117736839268e-05,
"loss": 1.0381,
"step": 591
},
{
"epoch": 0.4291409931134469,
"grad_norm": 3.3593984924680065,
"learning_rate": 1.2748592661792278e-05,
"loss": 1.0852,
"step": 592
},
{
"epoch": 0.42986589343965204,
"grad_norm": 1.8379649951016293,
"learning_rate": 1.2725992779548451e-05,
"loss": 1.0407,
"step": 593
},
{
"epoch": 0.4305907937658572,
"grad_norm": 2.0580648499060614,
"learning_rate": 1.2703377846440591e-05,
"loss": 0.9797,
"step": 594
},
{
"epoch": 0.43131569409206233,
"grad_norm": 2.547624173063941,
"learning_rate": 1.2680747987331215e-05,
"loss": 1.0976,
"step": 595
},
{
"epoch": 0.4320405944182675,
"grad_norm": 2.358740461402887,
"learning_rate": 1.2658103327165226e-05,
"loss": 0.9832,
"step": 596
},
{
"epoch": 0.4327654947444726,
"grad_norm": 2.2958356138544223,
"learning_rate": 1.2635443990969266e-05,
"loss": 1.0707,
"step": 597
},
{
"epoch": 0.43349039507067777,
"grad_norm": 2.2673074349057036,
"learning_rate": 1.2612770103850998e-05,
"loss": 1.155,
"step": 598
},
{
"epoch": 0.4342152953968829,
"grad_norm": 2.191734084847357,
"learning_rate": 1.2590081790998414e-05,
"loss": 1.1389,
"step": 599
},
{
"epoch": 0.43494019572308806,
"grad_norm": 2.2857227047676365,
"learning_rate": 1.2567379177679178e-05,
"loss": 0.9963,
"step": 600
},
{
"epoch": 0.4356650960492932,
"grad_norm": 2.1784697059963047,
"learning_rate": 1.254466238923989e-05,
"loss": 1.0586,
"step": 601
},
{
"epoch": 0.43638999637549836,
"grad_norm": 2.521226757264337,
"learning_rate": 1.2521931551105427e-05,
"loss": 1.1325,
"step": 602
},
{
"epoch": 0.4371148967017035,
"grad_norm": 2.061977865722029,
"learning_rate": 1.2499186788778223e-05,
"loss": 0.9365,
"step": 603
},
{
"epoch": 0.43783979702790865,
"grad_norm": 2.447645929802628,
"learning_rate": 1.2476428227837607e-05,
"loss": 1.0145,
"step": 604
},
{
"epoch": 0.4385646973541138,
"grad_norm": 2.267005489506538,
"learning_rate": 1.2453655993939088e-05,
"loss": 1.0399,
"step": 605
},
{
"epoch": 0.43928959768031894,
"grad_norm": 2.084407985939619,
"learning_rate": 1.2430870212813661e-05,
"loss": 0.9565,
"step": 606
},
{
"epoch": 0.4400144980065241,
"grad_norm": 2.6037410215898458,
"learning_rate": 1.2408071010267125e-05,
"loss": 1.0644,
"step": 607
},
{
"epoch": 0.44073939833272924,
"grad_norm": 2.2081346806030235,
"learning_rate": 1.2385258512179375e-05,
"loss": 0.9149,
"step": 608
},
{
"epoch": 0.4414642986589344,
"grad_norm": 2.3544684711758173,
"learning_rate": 1.2362432844503725e-05,
"loss": 1.0141,
"step": 609
},
{
"epoch": 0.44218919898513953,
"grad_norm": 2.0791622079935914,
"learning_rate": 1.233959413326619e-05,
"loss": 1.0256,
"step": 610
},
{
"epoch": 0.4429140993113447,
"grad_norm": 2.968477393667513,
"learning_rate": 1.2316742504564811e-05,
"loss": 1.1866,
"step": 611
},
{
"epoch": 0.4436389996375498,
"grad_norm": 2.076859528680039,
"learning_rate": 1.2293878084568944e-05,
"loss": 0.9385,
"step": 612
},
{
"epoch": 0.44436389996375497,
"grad_norm": 2.499714197915474,
"learning_rate": 1.2271000999518563e-05,
"loss": 1.1666,
"step": 613
},
{
"epoch": 0.4450888002899601,
"grad_norm": 2.003190354705559,
"learning_rate": 1.2248111375723585e-05,
"loss": 1.0057,
"step": 614
},
{
"epoch": 0.44581370061616526,
"grad_norm": 2.037354395792757,
"learning_rate": 1.2225209339563144e-05,
"loss": 0.9492,
"step": 615
},
{
"epoch": 0.4465386009423704,
"grad_norm": 2.586450689063457,
"learning_rate": 1.2202295017484911e-05,
"loss": 1.0752,
"step": 616
},
{
"epoch": 0.44726350126857556,
"grad_norm": 1.903194057594901,
"learning_rate": 1.2179368536004382e-05,
"loss": 0.9995,
"step": 617
},
{
"epoch": 0.4479884015947807,
"grad_norm": 2.3340177701835523,
"learning_rate": 1.2156430021704196e-05,
"loss": 1.0832,
"step": 618
},
{
"epoch": 0.44871330192098585,
"grad_norm": 2.181064156987608,
"learning_rate": 1.2133479601233431e-05,
"loss": 0.9726,
"step": 619
},
{
"epoch": 0.449438202247191,
"grad_norm": 2.1594915956868097,
"learning_rate": 1.2110517401306896e-05,
"loss": 0.9393,
"step": 620
},
{
"epoch": 0.45016310257339615,
"grad_norm": 2.5184901765324934,
"learning_rate": 1.2087543548704435e-05,
"loss": 1.1569,
"step": 621
},
{
"epoch": 0.4508880028996013,
"grad_norm": 2.2361203028149585,
"learning_rate": 1.2064558170270236e-05,
"loss": 1.0704,
"step": 622
},
{
"epoch": 0.45161290322580644,
"grad_norm": 1.9402238173723454,
"learning_rate": 1.2041561392912118e-05,
"loss": 1.0127,
"step": 623
},
{
"epoch": 0.4523378035520116,
"grad_norm": 2.6102129250515724,
"learning_rate": 1.2018553343600841e-05,
"loss": 1.1364,
"step": 624
},
{
"epoch": 0.45306270387821673,
"grad_norm": 2.0267045043502883,
"learning_rate": 1.1995534149369397e-05,
"loss": 1.0408,
"step": 625
},
{
"epoch": 0.4537876042044219,
"grad_norm": 2.0973318190957353,
"learning_rate": 1.1972503937312315e-05,
"loss": 0.994,
"step": 626
},
{
"epoch": 0.454512504530627,
"grad_norm": 1.9897533182088447,
"learning_rate": 1.194946283458495e-05,
"loss": 0.9902,
"step": 627
},
{
"epoch": 0.4552374048568322,
"grad_norm": 2.624450240222456,
"learning_rate": 1.1926410968402791e-05,
"loss": 1.1201,
"step": 628
},
{
"epoch": 0.4559623051830373,
"grad_norm": 1.9043526191630893,
"learning_rate": 1.1903348466040758e-05,
"loss": 1.0846,
"step": 629
},
{
"epoch": 0.45668720550924247,
"grad_norm": 1.9419173600857174,
"learning_rate": 1.1880275454832493e-05,
"loss": 1.0469,
"step": 630
},
{
"epoch": 0.4574121058354476,
"grad_norm": 2.2783826877349673,
"learning_rate": 1.1857192062169656e-05,
"loss": 0.9387,
"step": 631
},
{
"epoch": 0.45813700616165276,
"grad_norm": 2.093160188755417,
"learning_rate": 1.183409841550123e-05,
"loss": 1.1626,
"step": 632
},
{
"epoch": 0.4588619064878579,
"grad_norm": 2.203652115607489,
"learning_rate": 1.1810994642332808e-05,
"loss": 1.0042,
"step": 633
},
{
"epoch": 0.45958680681406305,
"grad_norm": 2.0594672175740008,
"learning_rate": 1.1787880870225905e-05,
"loss": 1.0613,
"step": 634
},
{
"epoch": 0.4603117071402682,
"grad_norm": 2.1686023194883055,
"learning_rate": 1.1764757226797233e-05,
"loss": 1.0283,
"step": 635
},
{
"epoch": 0.46103660746647335,
"grad_norm": 2.3713976995681327,
"learning_rate": 1.1741623839718002e-05,
"loss": 1.0412,
"step": 636
},
{
"epoch": 0.4617615077926785,
"grad_norm": 2.3932119999968005,
"learning_rate": 1.1718480836713228e-05,
"loss": 1.0953,
"step": 637
},
{
"epoch": 0.46248640811888364,
"grad_norm": 2.411654231766739,
"learning_rate": 1.169532834556102e-05,
"loss": 1.0146,
"step": 638
},
{
"epoch": 0.4632113084450888,
"grad_norm": 2.481106610277318,
"learning_rate": 1.167216649409187e-05,
"loss": 0.9666,
"step": 639
},
{
"epoch": 0.46393620877129393,
"grad_norm": 2.466302214254987,
"learning_rate": 1.164899541018794e-05,
"loss": 1.1808,
"step": 640
},
{
"epoch": 0.4646611090974991,
"grad_norm": 2.086736565800973,
"learning_rate": 1.1625815221782386e-05,
"loss": 1.066,
"step": 641
},
{
"epoch": 0.4653860094237042,
"grad_norm": 2.1129903671304344,
"learning_rate": 1.1602626056858617e-05,
"loss": 1.0851,
"step": 642
},
{
"epoch": 0.4661109097499094,
"grad_norm": 2.012359347751327,
"learning_rate": 1.1579428043449612e-05,
"loss": 0.9234,
"step": 643
},
{
"epoch": 0.4668358100761145,
"grad_norm": 1.731209443837663,
"learning_rate": 1.1556221309637204e-05,
"loss": 0.9405,
"step": 644
},
{
"epoch": 0.46756071040231967,
"grad_norm": 2.3738797051074587,
"learning_rate": 1.1533005983551361e-05,
"loss": 1.0442,
"step": 645
},
{
"epoch": 0.4682856107285248,
"grad_norm": 2.5124526996392333,
"learning_rate": 1.1509782193369514e-05,
"loss": 1.0362,
"step": 646
},
{
"epoch": 0.46901051105472996,
"grad_norm": 2.2245947511275403,
"learning_rate": 1.1486550067315803e-05,
"loss": 1.0992,
"step": 647
},
{
"epoch": 0.4697354113809351,
"grad_norm": 2.3135678458341484,
"learning_rate": 1.146330973366041e-05,
"loss": 1.0667,
"step": 648
},
{
"epoch": 0.47046031170714026,
"grad_norm": 1.8464379606767438,
"learning_rate": 1.1440061320718821e-05,
"loss": 1.094,
"step": 649
},
{
"epoch": 0.4711852120333454,
"grad_norm": 2.0764785227825864,
"learning_rate": 1.1416804956851138e-05,
"loss": 0.8967,
"step": 650
},
{
"epoch": 0.47191011235955055,
"grad_norm": 2.434020067387766,
"learning_rate": 1.1393540770461358e-05,
"loss": 0.9351,
"step": 651
},
{
"epoch": 0.4726350126857557,
"grad_norm": 2.2367136913648578,
"learning_rate": 1.1370268889996665e-05,
"loss": 0.9879,
"step": 652
},
{
"epoch": 0.47335991301196084,
"grad_norm": 2.16313174121554,
"learning_rate": 1.1346989443946734e-05,
"loss": 1.1194,
"step": 653
},
{
"epoch": 0.474084813338166,
"grad_norm": 2.165617974637285,
"learning_rate": 1.1323702560842998e-05,
"loss": 1.0268,
"step": 654
},
{
"epoch": 0.47480971366437114,
"grad_norm": 2.150563367555029,
"learning_rate": 1.1300408369257962e-05,
"loss": 1.1303,
"step": 655
},
{
"epoch": 0.4755346139905763,
"grad_norm": 2.1496140379016513,
"learning_rate": 1.127710699780448e-05,
"loss": 1.0181,
"step": 656
},
{
"epoch": 0.47625951431678143,
"grad_norm": 1.9308649623116936,
"learning_rate": 1.125379857513505e-05,
"loss": 1.0044,
"step": 657
},
{
"epoch": 0.4769844146429866,
"grad_norm": 2.520717730566298,
"learning_rate": 1.1230483229941092e-05,
"loss": 1.0142,
"step": 658
},
{
"epoch": 0.4777093149691917,
"grad_norm": 1.9969419284442884,
"learning_rate": 1.1207161090952255e-05,
"loss": 0.9586,
"step": 659
},
{
"epoch": 0.47843421529539687,
"grad_norm": 2.199551212478649,
"learning_rate": 1.1183832286935703e-05,
"loss": 1.0442,
"step": 660
},
{
"epoch": 0.479159115621602,
"grad_norm": 2.4903266005653433,
"learning_rate": 1.1160496946695388e-05,
"loss": 1.057,
"step": 661
},
{
"epoch": 0.47988401594780716,
"grad_norm": 2.0757922408522447,
"learning_rate": 1.1137155199071356e-05,
"loss": 1.0865,
"step": 662
},
{
"epoch": 0.4806089162740123,
"grad_norm": 1.9478487958266806,
"learning_rate": 1.1113807172939033e-05,
"loss": 0.9878,
"step": 663
},
{
"epoch": 0.48133381660021746,
"grad_norm": 2.5077394876515027,
"learning_rate": 1.1090452997208496e-05,
"loss": 1.001,
"step": 664
},
{
"epoch": 0.4820587169264226,
"grad_norm": 2.3619801940222542,
"learning_rate": 1.1067092800823798e-05,
"loss": 1.0786,
"step": 665
},
{
"epoch": 0.48278361725262775,
"grad_norm": 2.4608146249469485,
"learning_rate": 1.1043726712762213e-05,
"loss": 0.8398,
"step": 666
},
{
"epoch": 0.4835085175788329,
"grad_norm": 2.5682411824097273,
"learning_rate": 1.1020354862033553e-05,
"loss": 1.0639,
"step": 667
},
{
"epoch": 0.48423341790503804,
"grad_norm": 2.2377079055852636,
"learning_rate": 1.099697737767945e-05,
"loss": 1.0577,
"step": 668
},
{
"epoch": 0.4849583182312432,
"grad_norm": 2.4684218039338366,
"learning_rate": 1.097359438877263e-05,
"loss": 1.2059,
"step": 669
},
{
"epoch": 0.48568321855744834,
"grad_norm": 1.9986702143467359,
"learning_rate": 1.0950206024416228e-05,
"loss": 1.1099,
"step": 670
},
{
"epoch": 0.4864081188836535,
"grad_norm": 1.651255607571327,
"learning_rate": 1.0926812413743041e-05,
"loss": 1.0481,
"step": 671
},
{
"epoch": 0.48713301920985863,
"grad_norm": 2.1890746076308734,
"learning_rate": 1.0903413685914843e-05,
"loss": 0.9785,
"step": 672
},
{
"epoch": 0.4878579195360638,
"grad_norm": 1.983204547752865,
"learning_rate": 1.0880009970121655e-05,
"loss": 0.9511,
"step": 673
},
{
"epoch": 0.4885828198622689,
"grad_norm": 1.992684611945782,
"learning_rate": 1.0856601395581037e-05,
"loss": 1.0077,
"step": 674
},
{
"epoch": 0.48930772018847407,
"grad_norm": 2.155401888693332,
"learning_rate": 1.0833188091537386e-05,
"loss": 0.9072,
"step": 675
},
{
"epoch": 0.4900326205146792,
"grad_norm": 2.7840807715345406,
"learning_rate": 1.08097701872612e-05,
"loss": 1.0153,
"step": 676
},
{
"epoch": 0.49075752084088436,
"grad_norm": 2.3948835243364996,
"learning_rate": 1.0786347812048381e-05,
"loss": 1.0366,
"step": 677
},
{
"epoch": 0.4914824211670895,
"grad_norm": 2.3996815926961865,
"learning_rate": 1.0762921095219511e-05,
"loss": 1.1168,
"step": 678
},
{
"epoch": 0.49220732149329466,
"grad_norm": 2.186176414807414,
"learning_rate": 1.0739490166119155e-05,
"loss": 0.9772,
"step": 679
},
{
"epoch": 0.4929322218194998,
"grad_norm": 2.397287885437739,
"learning_rate": 1.0716055154115123e-05,
"loss": 1.0678,
"step": 680
},
{
"epoch": 0.49365712214570495,
"grad_norm": 2.257426550702264,
"learning_rate": 1.0692616188597775e-05,
"loss": 0.9975,
"step": 681
},
{
"epoch": 0.4943820224719101,
"grad_norm": 2.2392714970259324,
"learning_rate": 1.0669173398979292e-05,
"loss": 0.9476,
"step": 682
},
{
"epoch": 0.49510692279811525,
"grad_norm": 1.9243901111495054,
"learning_rate": 1.0645726914692973e-05,
"loss": 1.0169,
"step": 683
},
{
"epoch": 0.4958318231243204,
"grad_norm": 1.874543436460937,
"learning_rate": 1.062227686519252e-05,
"loss": 0.9925,
"step": 684
},
{
"epoch": 0.49655672345052554,
"grad_norm": 2.4206739807777744,
"learning_rate": 1.0598823379951315e-05,
"loss": 0.9653,
"step": 685
},
{
"epoch": 0.4972816237767307,
"grad_norm": 1.8452641728018429,
"learning_rate": 1.057536658846171e-05,
"loss": 1.1065,
"step": 686
},
{
"epoch": 0.49800652410293583,
"grad_norm": 2.615942811689107,
"learning_rate": 1.0551906620234312e-05,
"loss": 1.1092,
"step": 687
},
{
"epoch": 0.498731424429141,
"grad_norm": 2.7357084625917576,
"learning_rate": 1.0528443604797266e-05,
"loss": 1.1432,
"step": 688
},
{
"epoch": 0.4994563247553461,
"grad_norm": 2.2460612751605185,
"learning_rate": 1.0504977671695542e-05,
"loss": 0.9907,
"step": 689
},
{
"epoch": 0.5001812250815513,
"grad_norm": 2.753596180660813,
"learning_rate": 1.0481508950490222e-05,
"loss": 1.0457,
"step": 690
},
{
"epoch": 0.5001812250815513,
"eval_loss": 1.8697493076324463,
"eval_runtime": 1108.3592,
"eval_samples_per_second": 15.928,
"eval_steps_per_second": 0.498,
"step": 690
},
{
"epoch": 0.5009061254077565,
"grad_norm": 2.6999729213529817,
"learning_rate": 1.0458037570757781e-05,
"loss": 1.0595,
"step": 691
},
{
"epoch": 0.5016310257339616,
"grad_norm": 2.211696739230099,
"learning_rate": 1.0434563662089367e-05,
"loss": 0.9517,
"step": 692
},
{
"epoch": 0.5023559260601668,
"grad_norm": 2.4087448709687935,
"learning_rate": 1.04110873540901e-05,
"loss": 1.1667,
"step": 693
},
{
"epoch": 0.5030808263863719,
"grad_norm": 2.4196372563289645,
"learning_rate": 1.0387608776378337e-05,
"loss": 1.0019,
"step": 694
},
{
"epoch": 0.5038057267125771,
"grad_norm": 2.2271775487795424,
"learning_rate": 1.0364128058584974e-05,
"loss": 1.0789,
"step": 695
},
{
"epoch": 0.5045306270387822,
"grad_norm": 2.3449990407458,
"learning_rate": 1.0340645330352723e-05,
"loss": 1.0373,
"step": 696
},
{
"epoch": 0.5052555273649874,
"grad_norm": 2.0780920184441847,
"learning_rate": 1.0317160721335393e-05,
"loss": 1.0106,
"step": 697
},
{
"epoch": 0.5059804276911924,
"grad_norm": 2.5924431334903186,
"learning_rate": 1.0293674361197173e-05,
"loss": 1.0588,
"step": 698
},
{
"epoch": 0.5067053280173976,
"grad_norm": 2.1365135156985744,
"learning_rate": 1.0270186379611932e-05,
"loss": 1.023,
"step": 699
},
{
"epoch": 0.5074302283436027,
"grad_norm": 2.173447677361539,
"learning_rate": 1.0246696906262484e-05,
"loss": 1.0468,
"step": 700
},
{ |
|
"epoch": 0.5081551286698079, |
|
"grad_norm": 2.245470686825682, |
|
"learning_rate": 1.0223206070839878e-05, |
|
"loss": 1.0525, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.508880028996013, |
|
"grad_norm": 2.2398665851941706, |
|
"learning_rate": 1.0199714003042685e-05, |
|
"loss": 1.0942, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.5096049293222182, |
|
"grad_norm": 2.5997096574935314, |
|
"learning_rate": 1.017622083257628e-05, |
|
"loss": 1.0766, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.5103298296484233, |
|
"grad_norm": 2.4146958737582644, |
|
"learning_rate": 1.0152726689152135e-05, |
|
"loss": 1.0928, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.5110547299746285, |
|
"grad_norm": 2.130801798872479, |
|
"learning_rate": 1.0129231702487077e-05, |
|
"loss": 0.972, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.5117796303008336, |
|
"grad_norm": 2.236352513835616, |
|
"learning_rate": 1.01057360023026e-05, |
|
"loss": 1.0103, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.5125045306270388, |
|
"grad_norm": 2.188186582922333, |
|
"learning_rate": 1.0082239718324136e-05, |
|
"loss": 1.0367, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.5132294309532439, |
|
"grad_norm": 2.198716617442744, |
|
"learning_rate": 1.0058742980280341e-05, |
|
"loss": 0.972, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.5139543312794491, |
|
"grad_norm": 2.294063462573355, |
|
"learning_rate": 1.0035245917902376e-05, |
|
"loss": 1.0778, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.5146792316056542, |
|
"grad_norm": 2.228364149473082, |
|
"learning_rate": 1.001174866092319e-05, |
|
"loss": 1.0271, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.5154041319318594, |
|
"grad_norm": 1.9300202276554854, |
|
"learning_rate": 9.988251339076811e-06, |
|
"loss": 0.9044, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.5161290322580645, |
|
"grad_norm": 2.3634423427900533, |
|
"learning_rate": 9.964754082097626e-06, |
|
"loss": 1.0843, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.5168539325842697, |
|
"grad_norm": 2.38754231763043, |
|
"learning_rate": 9.941257019719662e-06, |
|
"loss": 1.0854, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.5175788329104748, |
|
"grad_norm": 1.8044209443143941, |
|
"learning_rate": 9.917760281675867e-06, |
|
"loss": 0.9059, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.51830373323668, |
|
"grad_norm": 2.506057831976918, |
|
"learning_rate": 9.894263997697405e-06, |
|
"loss": 1.0033, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.5190286335628851, |
|
"grad_norm": 2.383694009514269, |
|
"learning_rate": 9.870768297512924e-06, |
|
"loss": 0.9435, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.5197535338890903, |
|
"grad_norm": 2.7218871091799195, |
|
"learning_rate": 9.847273310847869e-06, |
|
"loss": 1.013, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.5204784342152954, |
|
"grad_norm": 1.9488674447864536, |
|
"learning_rate": 9.823779167423723e-06, |
|
"loss": 1.064, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.5212033345415006, |
|
"grad_norm": 2.543494468466047, |
|
"learning_rate": 9.80028599695732e-06, |
|
"loss": 1.0396, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.5219282348677057, |
|
"grad_norm": 2.1642282148880976, |
|
"learning_rate": 9.776793929160129e-06, |
|
"loss": 0.9959, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.5226531351939109, |
|
"grad_norm": 1.9378193767391163, |
|
"learning_rate": 9.753303093737518e-06, |
|
"loss": 1.0852, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.523378035520116, |
|
"grad_norm": 2.5527944661940536, |
|
"learning_rate": 9.729813620388071e-06, |
|
"loss": 1.1522, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.5241029358463212, |
|
"grad_norm": 2.3217437760784074, |
|
"learning_rate": 9.70632563880283e-06, |
|
"loss": 1.0067, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.5248278361725263, |
|
"grad_norm": 3.1417602276811527, |
|
"learning_rate": 9.682839278664614e-06, |
|
"loss": 0.9952, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.5255527364987315, |
|
"grad_norm": 1.9148171819921405, |
|
"learning_rate": 9.659354669647277e-06, |
|
"loss": 0.9656, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.5262776368249366, |
|
"grad_norm": 2.449509825571267, |
|
"learning_rate": 9.635871941415026e-06, |
|
"loss": 0.9131, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.5270025371511418, |
|
"grad_norm": 2.1963443291334697, |
|
"learning_rate": 9.612391223621667e-06, |
|
"loss": 1.0032, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.5277274374773469, |
|
"grad_norm": 2.2338194520764074, |
|
"learning_rate": 9.588912645909905e-06, |
|
"loss": 1.0494, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.528452337803552, |
|
"grad_norm": 1.9959846236290153, |
|
"learning_rate": 9.565436337910637e-06, |
|
"loss": 0.947, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.5291772381297571, |
|
"grad_norm": 2.078163095936865, |
|
"learning_rate": 9.54196242924222e-06, |
|
"loss": 1.0311, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.5299021384559623, |
|
"grad_norm": 2.1980066549775024, |
|
"learning_rate": 9.51849104950978e-06, |
|
"loss": 1.0051, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.5306270387821674, |
|
"grad_norm": 2.4191287974234705, |
|
"learning_rate": 9.495022328304461e-06, |
|
"loss": 1.0469, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.5313519391083726, |
|
"grad_norm": 2.298052468185075, |
|
"learning_rate": 9.47155639520274e-06, |
|
"loss": 1.0115, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.5320768394345777, |
|
"grad_norm": 2.1232889455421406, |
|
"learning_rate": 9.448093379765693e-06, |
|
"loss": 1.0524, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.5328017397607829, |
|
"grad_norm": 2.44786208230678, |
|
"learning_rate": 9.424633411538289e-06, |
|
"loss": 1.0772, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.533526640086988, |
|
"grad_norm": 2.102354340691057, |
|
"learning_rate": 9.401176620048687e-06, |
|
"loss": 1.0344, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.5342515404131932, |
|
"grad_norm": 2.108353702137098, |
|
"learning_rate": 9.377723134807482e-06, |
|
"loss": 1.1669, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.5349764407393983, |
|
"grad_norm": 2.4286899167576412, |
|
"learning_rate": 9.35427308530703e-06, |
|
"loss": 1.0635, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.5357013410656035, |
|
"grad_norm": 2.2012613191456025, |
|
"learning_rate": 9.330826601020713e-06, |
|
"loss": 1.1047, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.5364262413918086, |
|
"grad_norm": 2.7290128431948646, |
|
"learning_rate": 9.307383811402229e-06, |
|
"loss": 1.0265, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.5371511417180138, |
|
"grad_norm": 2.3858907048476135, |
|
"learning_rate": 9.283944845884878e-06, |
|
"loss": 0.9387, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.5378760420442189, |
|
"grad_norm": 2.080475476085547, |
|
"learning_rate": 9.260509833880848e-06, |
|
"loss": 0.9617, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.5386009423704241, |
|
"grad_norm": 1.8615886294130846, |
|
"learning_rate": 9.23707890478049e-06, |
|
"loss": 0.8874, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.5393258426966292, |
|
"grad_norm": 1.9904190396103651, |
|
"learning_rate": 9.213652187951624e-06, |
|
"loss": 1.0746, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.5400507430228344, |
|
"grad_norm": 2.96232733164202, |
|
"learning_rate": 9.190229812738802e-06, |
|
"loss": 1.1089, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.5407756433490395, |
|
"grad_norm": 2.525943985627746, |
|
"learning_rate": 9.166811908462616e-06, |
|
"loss": 1.0642, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.5415005436752447, |
|
"grad_norm": 2.2418322634514167, |
|
"learning_rate": 9.143398604418965e-06, |
|
"loss": 1.1124, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.5422254440014498, |
|
"grad_norm": 1.9220381898458985, |
|
"learning_rate": 9.11999002987835e-06, |
|
"loss": 0.9473, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.542950344327655, |
|
"grad_norm": 2.6621473432063176, |
|
"learning_rate": 9.096586314085162e-06, |
|
"loss": 1.0125, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.5436752446538601, |
|
"grad_norm": 2.3340603683637156, |
|
"learning_rate": 9.07318758625696e-06, |
|
"loss": 1.0542, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5444001449800653, |
|
"grad_norm": 2.1363055122068215, |
|
"learning_rate": 9.049793975583775e-06, |
|
"loss": 1.0586, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.5451250453062704, |
|
"grad_norm": 2.0326003541767803, |
|
"learning_rate": 9.026405611227371e-06, |
|
"loss": 0.9054, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.5458499456324756, |
|
"grad_norm": 1.6432957234320769, |
|
"learning_rate": 9.003022622320555e-06, |
|
"loss": 0.9625, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.5465748459586807, |
|
"grad_norm": 1.78897998635208, |
|
"learning_rate": 8.97964513796645e-06, |
|
"loss": 1.0138, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.5472997462848859, |
|
"grad_norm": 2.797841405037185, |
|
"learning_rate": 8.95627328723779e-06, |
|
"loss": 1.2344, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.548024646611091, |
|
"grad_norm": 2.0182212487321864, |
|
"learning_rate": 8.932907199176206e-06, |
|
"loss": 1.0249, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.5487495469372962, |
|
"grad_norm": 2.1185607535847586, |
|
"learning_rate": 8.909547002791506e-06, |
|
"loss": 1.0009, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.5494744472635013, |
|
"grad_norm": 2.5153165221241687, |
|
"learning_rate": 8.886192827060974e-06, |
|
"loss": 1.0271, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.5501993475897065, |
|
"grad_norm": 2.205784694298984, |
|
"learning_rate": 8.862844800928645e-06, |
|
"loss": 1.0508, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.5509242479159115, |
|
"grad_norm": 2.908166150479793, |
|
"learning_rate": 8.839503053304614e-06, |
|
"loss": 1.039, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.5516491482421167, |
|
"grad_norm": 1.9201627389676703, |
|
"learning_rate": 8.8161677130643e-06, |
|
"loss": 0.9532, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.5523740485683218, |
|
"grad_norm": 2.923787620312773, |
|
"learning_rate": 8.792838909047747e-06, |
|
"loss": 1.0188, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.553098948894527, |
|
"grad_norm": 3.157465367532413, |
|
"learning_rate": 8.769516770058915e-06, |
|
"loss": 1.0947, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.5538238492207321, |
|
"grad_norm": 2.060375498630697, |
|
"learning_rate": 8.746201424864956e-06, |
|
"loss": 0.9142, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.5545487495469373, |
|
"grad_norm": 1.9982518012339532, |
|
"learning_rate": 8.722893002195523e-06, |
|
"loss": 0.9624, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.5552736498731424, |
|
"grad_norm": 1.841122163960603, |
|
"learning_rate": 8.699591630742042e-06, |
|
"loss": 0.979, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.5559985501993476, |
|
"grad_norm": 2.0151604980838633, |
|
"learning_rate": 8.676297439157007e-06, |
|
"loss": 0.994, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.5567234505255527, |
|
"grad_norm": 2.5456721286611037, |
|
"learning_rate": 8.653010556053271e-06, |
|
"loss": 1.0495, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.5574483508517579, |
|
"grad_norm": 2.7097490860787183, |
|
"learning_rate": 8.629731110003337e-06, |
|
"loss": 1.124, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.558173251177963, |
|
"grad_norm": 1.949724143019753, |
|
"learning_rate": 8.606459229538645e-06, |
|
"loss": 0.997, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.5588981515041682, |
|
"grad_norm": 2.020801130677908, |
|
"learning_rate": 8.583195043148864e-06, |
|
"loss": 1.1449, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.5596230518303733, |
|
"grad_norm": 2.130589840488668, |
|
"learning_rate": 8.55993867928118e-06, |
|
"loss": 0.9983, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.5603479521565785, |
|
"grad_norm": 2.0836041709296316, |
|
"learning_rate": 8.536690266339593e-06, |
|
"loss": 0.9946, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.5610728524827836, |
|
"grad_norm": 2.3227053384824274, |
|
"learning_rate": 8.513449932684198e-06, |
|
"loss": 0.8824, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.5617977528089888, |
|
"grad_norm": 1.983623134191808, |
|
"learning_rate": 8.490217806630489e-06, |
|
"loss": 0.9447, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5625226531351939, |
|
"grad_norm": 2.1427968901005725, |
|
"learning_rate": 8.46699401644864e-06, |
|
"loss": 1.0275, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.5632475534613991, |
|
"grad_norm": 2.263521727209926, |
|
"learning_rate": 8.443778690362801e-06, |
|
"loss": 0.9779, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.5639724537876042, |
|
"grad_norm": 1.784185254505844, |
|
"learning_rate": 8.42057195655039e-06, |
|
"loss": 0.9217, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.5646973541138094, |
|
"grad_norm": 2.5535009035102934, |
|
"learning_rate": 8.397373943141384e-06, |
|
"loss": 1.0259, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.5654222544400145, |
|
"grad_norm": 2.3274203754164873, |
|
"learning_rate": 8.374184778217617e-06, |
|
"loss": 1.0449, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.5661471547662197, |
|
"grad_norm": 2.751811026011905, |
|
"learning_rate": 8.351004589812061e-06, |
|
"loss": 1.0106, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.5668720550924248, |
|
"grad_norm": 2.0365364413353726, |
|
"learning_rate": 8.327833505908135e-06, |
|
"loss": 1.0282, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.56759695541863, |
|
"grad_norm": 2.3504702430826154, |
|
"learning_rate": 8.30467165443898e-06, |
|
"loss": 1.0629, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.5683218557448351, |
|
"grad_norm": 2.0575133385244646, |
|
"learning_rate": 8.281519163286772e-06, |
|
"loss": 1.0005, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.5690467560710403, |
|
"grad_norm": 2.5919383011426467, |
|
"learning_rate": 8.258376160282001e-06, |
|
"loss": 1.0555, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.5697716563972454, |
|
"grad_norm": 2.4060395092358102, |
|
"learning_rate": 8.235242773202772e-06, |
|
"loss": 1.0631, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.5704965567234506, |
|
"grad_norm": 1.9397501796747694, |
|
"learning_rate": 8.212119129774098e-06, |
|
"loss": 1.1304, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.5712214570496557, |
|
"grad_norm": 2.110530023102397, |
|
"learning_rate": 8.189005357667191e-06, |
|
"loss": 0.9567, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.5719463573758609, |
|
"grad_norm": 2.1870319191564658, |
|
"learning_rate": 8.165901584498774e-06, |
|
"loss": 1.0683, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.572671257702066, |
|
"grad_norm": 1.8495796212471047, |
|
"learning_rate": 8.142807937830348e-06, |
|
"loss": 0.9837, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.5733961580282712, |
|
"grad_norm": 2.64388618965451, |
|
"learning_rate": 8.11972454516751e-06, |
|
"loss": 0.9659, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.5741210583544762, |
|
"grad_norm": 2.0330246862165415, |
|
"learning_rate": 8.096651533959244e-06, |
|
"loss": 0.9591, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.5748459586806814, |
|
"grad_norm": 2.2631207779996827, |
|
"learning_rate": 8.073589031597209e-06, |
|
"loss": 0.9873, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.5755708590068865, |
|
"grad_norm": 2.3948632401763064, |
|
"learning_rate": 8.050537165415053e-06, |
|
"loss": 1.1371, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.5762957593330917, |
|
"grad_norm": 2.075607270336768, |
|
"learning_rate": 8.027496062687688e-06, |
|
"loss": 0.8908, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.5770206596592968, |
|
"grad_norm": 2.065746544851814, |
|
"learning_rate": 8.004465850630605e-06, |
|
"loss": 1.0025, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.577745559985502, |
|
"grad_norm": 2.2202432240869845, |
|
"learning_rate": 7.981446656399162e-06, |
|
"loss": 1.0429, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.5784704603117071, |
|
"grad_norm": 2.309075300778911, |
|
"learning_rate": 7.958438607087884e-06, |
|
"loss": 0.9851, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.5791953606379123, |
|
"grad_norm": 2.4033759237105166, |
|
"learning_rate": 7.935441829729766e-06, |
|
"loss": 1.032, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.5799202609641174, |
|
"grad_norm": 2.625985750403029, |
|
"learning_rate": 7.912456451295567e-06, |
|
"loss": 1.0102, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5806451612903226, |
|
"grad_norm": 2.410099594821792, |
|
"learning_rate": 7.889482598693108e-06, |
|
"loss": 0.9639, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.5813700616165277, |
|
"grad_norm": 1.9619910669766136, |
|
"learning_rate": 7.866520398766574e-06, |
|
"loss": 0.9297, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.5820949619427329, |
|
"grad_norm": 2.930559344753908, |
|
"learning_rate": 7.843569978295804e-06, |
|
"loss": 1.0681, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.582819862268938, |
|
"grad_norm": 1.8579566089499246, |
|
"learning_rate": 7.820631463995622e-06, |
|
"loss": 0.9247, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.5835447625951432, |
|
"grad_norm": 1.783098743265068, |
|
"learning_rate": 7.797704982515094e-06, |
|
"loss": 0.9649, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.5842696629213483, |
|
"grad_norm": 1.9534992322071332, |
|
"learning_rate": 7.774790660436857e-06, |
|
"loss": 0.9234, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.5849945632475535, |
|
"grad_norm": 2.3775300305715428, |
|
"learning_rate": 7.751888624276418e-06, |
|
"loss": 1.0246, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.5857194635737586, |
|
"grad_norm": 2.0533804995629525, |
|
"learning_rate": 7.728999000481436e-06, |
|
"loss": 0.9705, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.5864443638999638, |
|
"grad_norm": 1.959728561512236, |
|
"learning_rate": 7.706121915431059e-06, |
|
"loss": 0.9359, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.5871692642261689, |
|
"grad_norm": 2.502678481308203, |
|
"learning_rate": 7.68325749543519e-06, |
|
"loss": 0.9802, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.5878941645523741, |
|
"grad_norm": 3.0385891335086352, |
|
"learning_rate": 7.660405866733813e-06, |
|
"loss": 1.1222, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.5886190648785792, |
|
"grad_norm": 2.040606391856475, |
|
"learning_rate": 7.637567155496277e-06, |
|
"loss": 1.02, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.5893439652047844, |
|
"grad_norm": 2.305055241806458, |
|
"learning_rate": 7.614741487820626e-06, |
|
"loss": 0.9964, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.5900688655309895, |
|
"grad_norm": 1.882568911490993, |
|
"learning_rate": 7.5919289897328784e-06, |
|
"loss": 0.9941, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.5907937658571947, |
|
"grad_norm": 2.5290386344366156, |
|
"learning_rate": 7.569129787186342e-06, |
|
"loss": 1.0734, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.5915186661833998, |
|
"grad_norm": 2.8819681752053454, |
|
"learning_rate": 7.546344006060918e-06, |
|
"loss": 1.0041, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.592243566509605, |
|
"grad_norm": 1.8804822924012536, |
|
"learning_rate": 7.523571772162392e-06, |
|
"loss": 0.9151, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.5929684668358101, |
|
"grad_norm": 2.255992660838785, |
|
"learning_rate": 7.500813211221778e-06, |
|
"loss": 1.0903, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.5936933671620153, |
|
"grad_norm": 2.9086758708738345, |
|
"learning_rate": 7.478068448894577e-06, |
|
"loss": 1.0814, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.5944182674882204, |
|
"grad_norm": 2.437481894199292, |
|
"learning_rate": 7.455337610760114e-06, |
|
"loss": 1.1113, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.5951431678144256, |
|
"grad_norm": 1.8660888898062387, |
|
"learning_rate": 7.4326208223208274e-06, |
|
"loss": 0.9695, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.5958680681406306, |
|
"grad_norm": 2.1229612134239995, |
|
"learning_rate": 7.409918209001585e-06, |
|
"loss": 1.0515, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.5965929684668358, |
|
"grad_norm": 2.3583730518901995, |
|
"learning_rate": 7.387229896149006e-06, |
|
"loss": 0.9714, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.5973178687930409, |
|
"grad_norm": 2.3153711669204413, |
|
"learning_rate": 7.364556009030734e-06, |
|
"loss": 1.059, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.5980427691192461, |
|
"grad_norm": 1.9967290233140527, |
|
"learning_rate": 7.341896672834776e-06, |
|
"loss": 1.047, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.5987676694454512, |
|
"grad_norm": 1.9339818611902988, |
|
"learning_rate": 7.31925201266879e-06, |
|
"loss": 0.972, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.5994925697716564, |
|
"grad_norm": 1.9457458272271317, |
|
"learning_rate": 7.2966221535594085e-06, |
|
"loss": 1.0206, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.6002174700978615, |
|
"grad_norm": 2.1792046814563495, |
|
"learning_rate": 7.274007220451553e-06, |
|
"loss": 1.0732, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.6009423704240667, |
|
"grad_norm": 2.029220407091385, |
|
"learning_rate": 7.251407338207725e-06, |
|
"loss": 0.8824, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.6016672707502718, |
|
"grad_norm": 1.915241829499402, |
|
"learning_rate": 7.228822631607327e-06, |
|
"loss": 0.9666, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.602392171076477, |
|
"grad_norm": 1.8518746441416718, |
|
"learning_rate": 7.2062532253459714e-06, |
|
"loss": 0.8742, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.6031170714026821, |
|
"grad_norm": 2.5929079919190547, |
|
"learning_rate": 7.183699244034797e-06, |
|
"loss": 1.1043, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.6038419717288873, |
|
"grad_norm": 2.2618493759552516, |
|
"learning_rate": 7.161160812199785e-06, |
|
"loss": 1.1689, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.6045668720550924, |
|
"grad_norm": 1.9916609519278592, |
|
"learning_rate": 7.138638054281055e-06, |
|
"loss": 1.0807, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.6052917723812976, |
|
"grad_norm": 1.8106802403260034, |
|
"learning_rate": 7.116131094632186e-06, |
|
"loss": 0.8694, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.6060166727075027, |
|
"grad_norm": 2.2006653848900015, |
|
"learning_rate": 7.093640057519531e-06, |
|
"loss": 1.0905, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.6067415730337079, |
|
"grad_norm": 2.3615298366429496, |
|
"learning_rate": 7.0711650671215405e-06, |
|
"loss": 1.0178, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.607466473359913, |
|
"grad_norm": 1.9549782714474917, |
|
"learning_rate": 7.048706247528061e-06, |
|
"loss": 1.0098, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.6081913736861182, |
|
"grad_norm": 2.2334786388231396, |
|
"learning_rate": 7.02626372273965e-06, |
|
"loss": 1.0028, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.6089162740123233, |
|
"grad_norm": 2.515375317083732, |
|
"learning_rate": 7.003837616666906e-06, |
|
"loss": 0.8934, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.6096411743385285, |
|
"grad_norm": 2.0714175972616347, |
|
"learning_rate": 6.98142805312977e-06, |
|
"loss": 1.008, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.6103660746647336, |
|
"grad_norm": 2.183667905495028, |
|
"learning_rate": 6.959035155856857e-06, |
|
"loss": 1.0119, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.6110909749909388, |
|
"grad_norm": 2.178274745085645, |
|
"learning_rate": 6.936659048484755e-06, |
|
"loss": 0.9791, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.6118158753171439, |
|
"grad_norm": 2.2299367836336903, |
|
"learning_rate": 6.914299854557349e-06, |
|
"loss": 1.0342, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.6125407756433491, |
|
"grad_norm": 1.870612790239624, |
|
"learning_rate": 6.891957697525149e-06, |
|
"loss": 1.0123, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.6132656759695542, |
|
"grad_norm": 2.2457853373416996, |
|
"learning_rate": 6.869632700744588e-06, |
|
"loss": 0.9599, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.6139905762957594, |
|
"grad_norm": 1.8964211823089199, |
|
"learning_rate": 6.847324987477375e-06, |
|
"loss": 1.0702, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.6147154766219645, |
|
"grad_norm": 2.2231409481558595, |
|
"learning_rate": 6.82503468088977e-06, |
|
"loss": 0.9252, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.6154403769481697, |
|
"grad_norm": 2.2158057236580575, |
|
"learning_rate": 6.802761904051937e-06, |
|
"loss": 0.8728, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.6161652772743748, |
|
"grad_norm": 1.984090771986885, |
|
"learning_rate": 6.78050677993725e-06, |
|
"loss": 0.9488, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.61689017760058, |
|
"grad_norm": 2.194669700982665, |
|
"learning_rate": 6.758269431421618e-06, |
|
"loss": 1.0144, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.617615077926785, |
|
"grad_norm": 2.391278122000099, |
|
"learning_rate": 6.7360499812828195e-06, |
|
"loss": 0.9911, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.6183399782529903, |
|
"grad_norm": 2.022013346401567, |
|
"learning_rate": 6.713848552199791e-06, |
|
"loss": 0.9898, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.6190648785791953, |
|
"grad_norm": 2.109272857624347, |
|
"learning_rate": 6.6916652667519855e-06, |
|
"loss": 0.9562, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.6197897789054005, |
|
"grad_norm": 2.829806104335344, |
|
"learning_rate": 6.669500247418674e-06, |
|
"loss": 1.0893, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.6205146792316056, |
|
"grad_norm": 2.2159865585012715, |
|
"learning_rate": 6.647353616578274e-06, |
|
"loss": 1.0566, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.6212395795578108, |
|
"grad_norm": 2.5699500401567272, |
|
"learning_rate": 6.625225496507688e-06, |
|
"loss": 0.9406, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.6219644798840159, |
|
"grad_norm": 1.9318589400149984, |
|
"learning_rate": 6.6031160093816005e-06, |
|
"loss": 0.9556, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.6226893802102211, |
|
"grad_norm": 2.046984039555971, |
|
"learning_rate": 6.5810252772718276e-06, |
|
"loss": 1.0834, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.6234142805364262, |
|
"grad_norm": 2.6992216221025087, |
|
"learning_rate": 6.5589534221466324e-06, |
|
"loss": 0.9793, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.6241391808626314, |
|
"grad_norm": 2.212396980492137, |
|
"learning_rate": 6.536900565870052e-06, |
|
"loss": 0.8663, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.6248640811888365, |
|
"grad_norm": 1.7999266895906552, |
|
"learning_rate": 6.514866830201234e-06, |
|
"loss": 0.8443, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.6255889815150417, |
|
"grad_norm": 2.1176385115675083, |
|
"learning_rate": 6.4928523367937444e-06, |
|
"loss": 0.9933, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.6263138818412468, |
|
"grad_norm": 2.2762631364540784, |
|
"learning_rate": 6.470857207194918e-06, |
|
"loss": 1.0051, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.627038782167452, |
|
"grad_norm": 2.0490123793497976, |
|
"learning_rate": 6.448881562845169e-06, |
|
"loss": 0.9627, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.6277636824936571, |
|
"grad_norm": 2.5600372472836117, |
|
"learning_rate": 6.426925525077341e-06, |
|
"loss": 1.0176, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.6284885828198623, |
|
"grad_norm": 2.8510918650806882, |
|
"learning_rate": 6.404989215116012e-06, |
|
"loss": 1.0215, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.6292134831460674, |
|
"grad_norm": 1.9769591446882206, |
|
"learning_rate": 6.3830727540768445e-06, |
|
"loss": 1.0687, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.6299383834722726, |
|
"grad_norm": 1.677393229029825, |
|
"learning_rate": 6.361176262965909e-06, |
|
"loss": 0.9384, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.6306632837984777, |
|
"grad_norm": 2.4132581003126705, |
|
"learning_rate": 6.339299862679016e-06, |
|
"loss": 0.973, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.6313881841246829, |
|
"grad_norm": 2.562250697801829, |
|
"learning_rate": 6.317443674001055e-06, |
|
"loss": 0.9105, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.632113084450888, |
|
"grad_norm": 2.0225187463112033, |
|
"learning_rate": 6.295607817605316e-06, |
|
"loss": 0.9315, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.6328379847770932, |
|
"grad_norm": 2.6962240997771167, |
|
"learning_rate": 6.273792414052833e-06, |
|
"loss": 1.058, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.6335628851032983, |
|
"grad_norm": 2.3293379147129705, |
|
"learning_rate": 6.251997583791711e-06, |
|
"loss": 1.0571, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.6342877854295035, |
|
"grad_norm": 2.17475653423949, |
|
"learning_rate": 6.230223447156469e-06, |
|
"loss": 1.0355, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.6350126857557086, |
|
"grad_norm": 2.293124315791374, |
|
"learning_rate": 6.208470124367374e-06, |
|
"loss": 1.0007, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.6357375860819138, |
|
"grad_norm": 2.1218951006607236, |
|
"learning_rate": 6.186737735529769e-06, |
|
"loss": 0.9075, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.6364624864081189, |
|
"grad_norm": 2.1947275964384114, |
|
"learning_rate": 6.165026400633416e-06, |
|
"loss": 1.0155, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.6371873867343241, |
|
"grad_norm": 1.8448468197236452, |
|
"learning_rate": 6.143336239551836e-06, |
|
"loss": 0.9704, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.6379122870605292, |
|
"grad_norm": 2.2203852100211496, |
|
"learning_rate": 6.12166737204164e-06, |
|
"loss": 1.0183, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.6386371873867344, |
|
"grad_norm": 1.9940936134783194, |
|
"learning_rate": 6.10001991774188e-06, |
|
"loss": 1.0362, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.6393620877129395, |
|
"grad_norm": 1.876178541905188, |
|
"learning_rate": 6.078393996173375e-06, |
|
"loss": 1.0958, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.6400869880391447, |
|
"grad_norm": 2.4300065422050174, |
|
"learning_rate": 6.056789726738055e-06, |
|
"loss": 1.0112, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.6408118883653497, |
|
"grad_norm": 2.182070128778044, |
|
"learning_rate": 6.035207228718305e-06, |
|
"loss": 1.0532, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.641536788691555, |
|
"grad_norm": 2.2790151746934826, |
|
"learning_rate": 6.0136466212763055e-06, |
|
"loss": 0.9994, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.64226168901776, |
|
"grad_norm": 2.0380124474521333, |
|
"learning_rate": 5.992108023453376e-06, |
|
"loss": 0.9901, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.6429865893439652, |
|
"grad_norm": 2.3256602048348234, |
|
"learning_rate": 5.9705915541693114e-06, |
|
"loss": 1.1098, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.6437114896701703, |
|
"grad_norm": 2.753858071131981, |
|
"learning_rate": 5.949097332221734e-06, |
|
"loss": 1.0406, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.6444363899963755, |
|
"grad_norm": 2.068578563869525, |
|
"learning_rate": 5.927625476285426e-06, |
|
"loss": 0.9272, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.6451612903225806, |
|
"grad_norm": 2.0943155971716947, |
|
"learning_rate": 5.906176104911687e-06, |
|
"loss": 1.0903, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.6458861906487858, |
|
"grad_norm": 2.244523642610638, |
|
"learning_rate": 5.884749336527676e-06, |
|
"loss": 1.0465, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.6466110909749909, |
|
"grad_norm": 2.2948461929009554, |
|
"learning_rate": 5.8633452894357516e-06, |
|
"loss": 0.9543, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.6473359913011961, |
|
"grad_norm": 2.316443702020551, |
|
"learning_rate": 5.841964081812822e-06, |
|
"loss": 1.0483, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.6480608916274012, |
|
"grad_norm": 2.210302183543952, |
|
"learning_rate": 5.820605831709694e-06, |
|
"loss": 0.8937, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.6487857919536064, |
|
"grad_norm": 2.1138527969671097, |
|
"learning_rate": 5.799270657050422e-06, |
|
"loss": 0.8968, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.6495106922798115, |
|
"grad_norm": 2.136178605345999, |
|
"learning_rate": 5.777958675631657e-06, |
|
"loss": 1.008, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.6502355926060167, |
|
"grad_norm": 1.9381106844139175, |
|
"learning_rate": 5.756670005121987e-06, |
|
"loss": 0.9595, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.6509604929322218, |
|
"grad_norm": 2.0173878555394604, |
|
"learning_rate": 5.735404763061303e-06, |
|
"loss": 0.9614, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.651685393258427, |
|
"grad_norm": 2.159725328780946, |
|
"learning_rate": 5.714163066860132e-06, |
|
"loss": 0.9797, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.6524102935846321, |
|
"grad_norm": 2.0644994410010713, |
|
"learning_rate": 5.6929450337990175e-06, |
|
"loss": 0.9167, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.6531351939108373, |
|
"grad_norm": 2.019906324854319, |
|
"learning_rate": 5.671750781027836e-06, |
|
"loss": 0.9609, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.6538600942370424, |
|
"grad_norm": 2.774207911473111, |
|
"learning_rate": 5.650580425565166e-06, |
|
"loss": 0.9905, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.6545849945632476, |
|
"grad_norm": 1.9230557138061348, |
|
"learning_rate": 5.629434084297654e-06, |
|
"loss": 1.053, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.6553098948894527, |
|
"grad_norm": 1.6916309404007928, |
|
"learning_rate": 5.60831187397935e-06, |
|
"loss": 0.9022, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.6560347952156579, |
|
"grad_norm": 2.292413850175932, |
|
"learning_rate": 5.58721391123108e-06, |
|
"loss": 0.9563, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.656759695541863, |
|
"grad_norm": 2.187934211851262, |
|
"learning_rate": 5.566140312539786e-06, |
|
"loss": 1.0045, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.6574845958680682, |
|
"grad_norm": 2.2765131164518726, |
|
"learning_rate": 5.545091194257892e-06, |
|
"loss": 1.121, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.6582094961942733, |
|
"grad_norm": 4.530518710071565, |
|
"learning_rate": 5.524066672602662e-06, |
|
"loss": 1.1574, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.6589343965204785, |
|
"grad_norm": 2.964421873486366, |
|
"learning_rate": 5.503066863655546e-06, |
|
"loss": 0.9753, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.6596592968466836, |
|
"grad_norm": 1.7482124392759353, |
|
"learning_rate": 5.482091883361571e-06, |
|
"loss": 0.9915, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.6603841971728888, |
|
"grad_norm": 2.224885148056625, |
|
"learning_rate": 5.461141847528655e-06, |
|
"loss": 0.9071, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.6611090974990939, |
|
"grad_norm": 3.186202824223461, |
|
"learning_rate": 5.440216871827004e-06, |
|
"loss": 1.0746, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.6618339978252991, |
|
"grad_norm": 1.7683023401240061, |
|
"learning_rate": 5.419317071788461e-06, |
|
"loss": 0.9636, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.6625588981515041, |
|
"grad_norm": 3.098972560392732, |
|
"learning_rate": 5.398442562805859e-06, |
|
"loss": 1.1527, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.6632837984777094, |
|
"grad_norm": 2.2445548524254946, |
|
"learning_rate": 5.3775934601324094e-06, |
|
"loss": 1.0563, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.6640086988039144, |
|
"grad_norm": 1.739464246848828, |
|
"learning_rate": 5.3567698788810366e-06, |
|
"loss": 1.0422, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.6647335991301196, |
|
"grad_norm": 2.0974710880958485, |
|
"learning_rate": 5.335971934023757e-06, |
|
"loss": 0.899, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.6654584994563247, |
|
"grad_norm": 2.029612124614373, |
|
"learning_rate": 5.315199740391044e-06, |
|
"loss": 0.9541, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.6661833997825299, |
|
"grad_norm": 2.2132638391788158, |
|
"learning_rate": 5.294453412671192e-06, |
|
"loss": 1.0131, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.666908300108735, |
|
"grad_norm": 2.5732318578474107, |
|
"learning_rate": 5.273733065409683e-06, |
|
"loss": 0.9976, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.6676332004349402, |
|
"grad_norm": 2.265338233038655, |
|
"learning_rate": 5.2530388130085555e-06, |
|
"loss": 0.9039, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.6683581007611453, |
|
"grad_norm": 2.006264114627049, |
|
"learning_rate": 5.232370769725769e-06, |
|
"loss": 0.9893, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.6690830010873505, |
|
"grad_norm": 2.0904960315209076, |
|
"learning_rate": 5.2117290496745764e-06, |
|
"loss": 0.9729, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.6698079014135556, |
|
"grad_norm": 1.943083190146926, |
|
"learning_rate": 5.191113766822905e-06, |
|
"loss": 0.9807, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.6705328017397608, |
|
"grad_norm": 2.661838718236442, |
|
"learning_rate": 5.1705250349927004e-06, |
|
"loss": 0.9773, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.6712577020659659, |
|
"grad_norm": 2.462190516757415, |
|
"learning_rate": 5.149962967859321e-06, |
|
"loss": 1.0769, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.6719826023921711, |
|
"grad_norm": 2.142823487160251, |
|
"learning_rate": 5.129427678950902e-06, |
|
"loss": 0.938, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.6727075027183762, |
|
"grad_norm": 1.972351101050343, |
|
"learning_rate": 5.108919281647728e-06, |
|
"loss": 0.9529, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.6734324030445814, |
|
"grad_norm": 1.9370633685514163, |
|
"learning_rate": 5.088437889181612e-06, |
|
"loss": 1.075, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.6741573033707865, |
|
"grad_norm": 1.975590280335086, |
|
"learning_rate": 5.067983614635263e-06, |
|
"loss": 0.9653, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.6748822036969917, |
|
"grad_norm": 2.187164922126263, |
|
"learning_rate": 5.04755657094167e-06, |
|
"loss": 1.1111, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.6756071040231968, |
|
"grad_norm": 1.9292793498955334, |
|
"learning_rate": 5.027156870883468e-06, |
|
"loss": 0.9759, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.676332004349402, |
|
"grad_norm": 1.9011667121841174, |
|
"learning_rate": 5.006784627092322e-06, |
|
"loss": 0.9786, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.6770569046756071, |
|
"grad_norm": 1.8408728752655816, |
|
"learning_rate": 4.986439952048313e-06, |
|
"loss": 0.8755, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.6777818050018123, |
|
"grad_norm": 2.2274071731605747, |
|
"learning_rate": 4.9661229580792995e-06, |
|
"loss": 0.9514, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.6785067053280174, |
|
"grad_norm": 2.11818671076986, |
|
"learning_rate": 4.945833757360308e-06, |
|
"loss": 0.9173, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.6792316056542226, |
|
"grad_norm": 2.244914641056037, |
|
"learning_rate": 4.925572461912912e-06, |
|
"loss": 0.9013, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.6799565059804277, |
|
"grad_norm": 2.175683562082561, |
|
"learning_rate": 4.905339183604614e-06, |
|
"loss": 1.0385, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.6806814063066329, |
|
"grad_norm": 1.8770079340690111, |
|
"learning_rate": 4.885134034148225e-06, |
|
"loss": 0.9151, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.681406306632838, |
|
"grad_norm": 2.071578077945829, |
|
"learning_rate": 4.864957125101254e-06, |
|
"loss": 1.0396, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.6821312069590432, |
|
"grad_norm": 2.6472781206388203, |
|
"learning_rate": 4.844808567865283e-06, |
|
"loss": 1.0126, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.6828561072852483, |
|
"grad_norm": 1.9436626348748187, |
|
"learning_rate": 4.824688473685362e-06, |
|
"loss": 1.0218, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.6835810076114535, |
|
"grad_norm": 2.986623329743054, |
|
"learning_rate": 4.804596953649381e-06, |
|
"loss": 1.0179, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.6843059079376586, |
|
"grad_norm": 2.0261869595811843, |
|
"learning_rate": 4.784534118687477e-06, |
|
"loss": 0.9552, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.6850308082638638, |
|
"grad_norm": 2.084576288901219, |
|
"learning_rate": 4.764500079571403e-06, |
|
"loss": 0.9645, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.6857557085900688, |
|
"grad_norm": 1.941612444910497, |
|
"learning_rate": 4.744494946913923e-06, |
|
"loss": 0.9273, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.686480608916274, |
|
"grad_norm": 2.3028054945708005, |
|
"learning_rate": 4.724518831168203e-06, |
|
"loss": 0.8653, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.6872055092424791, |
|
"grad_norm": 2.587974290597117, |
|
"learning_rate": 4.704571842627196e-06, |
|
"loss": 0.9328, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.6879304095686843, |
|
"grad_norm": 2.745061826423869, |
|
"learning_rate": 4.684654091423043e-06, |
|
"loss": 1.011, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.6886553098948894, |
|
"grad_norm": 2.084724138668754, |
|
"learning_rate": 4.664765687526455e-06, |
|
"loss": 1.031, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.6893802102210946, |
|
"grad_norm": 2.409381235205495, |
|
"learning_rate": 4.644906740746108e-06, |
|
"loss": 0.9139, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.6901051105472997, |
|
"grad_norm": 2.3347733533309905, |
|
"learning_rate": 4.6250773607280375e-06, |
|
"loss": 0.9462, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.6908300108735049, |
|
"grad_norm": 2.6210504366809095, |
|
"learning_rate": 4.605277656955043e-06, |
|
"loss": 1.0772, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.69155491119971, |
|
"grad_norm": 1.971545084083224, |
|
"learning_rate": 4.585507738746064e-06, |
|
"loss": 0.9594, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.6922798115259152, |
|
"grad_norm": 2.369021752491031, |
|
"learning_rate": 4.5657677152555925e-06, |
|
"loss": 1.0682, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.6930047118521203, |
|
"grad_norm": 2.714912007155751, |
|
"learning_rate": 4.5460576954730585e-06, |
|
"loss": 1.0267, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.6937296121783255, |
|
"grad_norm": 2.0355578348064727, |
|
"learning_rate": 4.526377788222239e-06, |
|
"loss": 1.0435, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.6944545125045306, |
|
"grad_norm": 2.1978211554878953, |
|
"learning_rate": 4.506728102160653e-06, |
|
"loss": 1.008, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.6951794128307358, |
|
"grad_norm": 2.007198364190231, |
|
"learning_rate": 4.487108745778958e-06, |
|
"loss": 1.0288, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.6959043131569409, |
|
"grad_norm": 2.019964605246266, |
|
"learning_rate": 4.467519827400357e-06, |
|
"loss": 1.1013, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.6966292134831461, |
|
"grad_norm": 2.0193639156586136, |
|
"learning_rate": 4.447961455179995e-06, |
|
"loss": 0.9918, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.6973541138093512, |
|
"grad_norm": 1.9471456399675218, |
|
"learning_rate": 4.428433737104362e-06, |
|
"loss": 0.9593, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.6980790141355564, |
|
"grad_norm": 2.1469665342189206, |
|
"learning_rate": 4.408936780990708e-06, |
|
"loss": 0.8953, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.6988039144617615, |
|
"grad_norm": 2.5540392388335453, |
|
"learning_rate": 4.3894706944864305e-06, |
|
"loss": 1.0014, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.6995288147879667, |
|
"grad_norm": 2.8104373863238177, |
|
"learning_rate": 4.370035585068493e-06, |
|
"loss": 1.1114, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.7002537151141718, |
|
"grad_norm": 2.30574506834477, |
|
"learning_rate": 4.350631560042821e-06, |
|
"loss": 0.9957, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.700978615440377, |
|
"grad_norm": 2.4431897548258523, |
|
"learning_rate": 4.331258726543723e-06, |
|
"loss": 1.007, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.7017035157665821, |
|
"grad_norm": 2.2915344664162918, |
|
"learning_rate": 4.311917191533288e-06, |
|
"loss": 1.022, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.7024284160927873, |
|
"grad_norm": 1.6307353345438238, |
|
"learning_rate": 4.292607061800796e-06, |
|
"loss": 0.9134, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.7031533164189924, |
|
"grad_norm": 2.094209654163443, |
|
"learning_rate": 4.273328443962137e-06, |
|
"loss": 1.0199, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.7038782167451976, |
|
"grad_norm": 2.6089783909619397, |
|
"learning_rate": 4.254081444459209e-06, |
|
"loss": 1.034, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.7046031170714027, |
|
"grad_norm": 2.756310969599043, |
|
"learning_rate": 4.234866169559342e-06, |
|
"loss": 0.9988, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.7053280173976079, |
|
"grad_norm": 2.1627058695633554, |
|
"learning_rate": 4.2156827253547095e-06, |
|
"loss": 0.8527, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.706052917723813, |
|
"grad_norm": 2.128748230219529, |
|
"learning_rate": 4.1965312177617355e-06, |
|
"loss": 0.9118, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.7067778180500182, |
|
"grad_norm": 2.3974561539624117, |
|
"learning_rate": 4.177411752520514e-06, |
|
"loss": 1.0645, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.7075027183762232, |
|
"grad_norm": 2.3317825972998016, |
|
"learning_rate": 4.1583244351942275e-06, |
|
"loss": 1.0144, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.7082276187024285, |
|
"grad_norm": 2.2782371205197585, |
|
"learning_rate": 4.139269371168562e-06, |
|
"loss": 0.9467, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.7089525190286335, |
|
"grad_norm": 1.9702225529384383, |
|
"learning_rate": 4.1202466656511245e-06, |
|
"loss": 0.9395, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.7096774193548387, |
|
"grad_norm": 2.056174409141514, |
|
"learning_rate": 4.101256423670862e-06, |
|
"loss": 0.942, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.7104023196810438, |
|
"grad_norm": 2.7573170909057474, |
|
"learning_rate": 4.082298750077485e-06, |
|
"loss": 0.8837, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.711127220007249, |
|
"grad_norm": 2.79548321777382, |
|
"learning_rate": 4.06337374954088e-06, |
|
"loss": 0.9387, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.7118521203334541, |
|
"grad_norm": 2.7061864575287875, |
|
"learning_rate": 4.04448152655055e-06, |
|
"loss": 1.114, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.7125770206596593, |
|
"grad_norm": 2.2826349591175488, |
|
"learning_rate": 4.025622185415014e-06, |
|
"loss": 0.9945, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.7133019209858644, |
|
"grad_norm": 2.2626152003002176, |
|
"learning_rate": 4.006795830261247e-06, |
|
"loss": 0.9539, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.7140268213120696, |
|
"grad_norm": 1.9616752319508053, |
|
"learning_rate": 3.988002565034096e-06, |
|
"loss": 0.9027, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.7147517216382747, |
|
"grad_norm": 2.2013038556275704, |
|
"learning_rate": 3.9692424934957175e-06, |
|
"loss": 0.9855, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.7154766219644799, |
|
"grad_norm": 2.2452819506033697, |
|
"learning_rate": 3.950515719224991e-06, |
|
"loss": 0.9616, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.716201522290685, |
|
"grad_norm": 2.298977632173436, |
|
"learning_rate": 3.9318223456169604e-06, |
|
"loss": 0.9073, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.7169264226168902, |
|
"grad_norm": 2.838544913848763, |
|
"learning_rate": 3.91316247588225e-06, |
|
"loss": 1.1417, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.7176513229430953, |
|
"grad_norm": 2.131182681333421, |
|
"learning_rate": 3.8945362130465035e-06, |
|
"loss": 0.9419, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.7183762232693005, |
|
"grad_norm": 2.065261545069331, |
|
"learning_rate": 3.8759436599498125e-06, |
|
"loss": 0.9205, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.7191011235955056, |
|
"grad_norm": 2.1618911363647086, |
|
"learning_rate": 3.857384919246154e-06, |
|
"loss": 0.9108, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.7198260239217108, |
|
"grad_norm": 2.7172592088098044, |
|
"learning_rate": 3.838860093402813e-06, |
|
"loss": 1.0987, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.7205509242479159, |
|
"grad_norm": 2.6032460817441203, |
|
"learning_rate": 3.820369284699823e-06, |
|
"loss": 0.9295, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.7212758245741211, |
|
"grad_norm": 2.0455131957130717, |
|
"learning_rate": 3.8019125952294045e-06, |
|
"loss": 0.8968, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.7220007249003262, |
|
"grad_norm": 2.4549976135759866, |
|
"learning_rate": 3.783490126895384e-06, |
|
"loss": 0.9866, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.7227256252265314, |
|
"grad_norm": 2.38725429694958, |
|
"learning_rate": 3.7651019814126656e-06, |
|
"loss": 1.0763, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.7234505255527365, |
|
"grad_norm": 2.447633489268824, |
|
"learning_rate": 3.746748260306635e-06, |
|
"loss": 1.0442, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.7241754258789417, |
|
"grad_norm": 2.015059323195586, |
|
"learning_rate": 3.7284290649126176e-06, |
|
"loss": 1.0369, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.7249003262051468, |
|
"grad_norm": 2.1439701734436944, |
|
"learning_rate": 3.7101444963753096e-06, |
|
"loss": 0.9838, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.725625226531352, |
|
"grad_norm": 2.196165996622997, |
|
"learning_rate": 3.691894655648225e-06, |
|
"loss": 1.0422, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.7263501268575571, |
|
"grad_norm": 2.1072970110294498, |
|
"learning_rate": 3.6736796434931443e-06, |
|
"loss": 1.0379, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.7270750271837623, |
|
"grad_norm": 2.052889977594413, |
|
"learning_rate": 3.6554995604795427e-06, |
|
"loss": 0.9731, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.7277999275099674, |
|
"grad_norm": 2.1849621710221605, |
|
"learning_rate": 3.637354506984051e-06, |
|
"loss": 0.9397, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.7285248278361726, |
|
"grad_norm": 2.302003380708732, |
|
"learning_rate": 3.619244583189877e-06, |
|
"loss": 1.0793, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.7292497281623777, |
|
"grad_norm": 2.4873231398689692, |
|
"learning_rate": 3.6011698890862913e-06, |
|
"loss": 0.9777, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.7299746284885829, |
|
"grad_norm": 2.06017307338732, |
|
"learning_rate": 3.583130524468038e-06, |
|
"loss": 0.8838, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.730699528814788, |
|
"grad_norm": 1.9994143545054046, |
|
"learning_rate": 3.565126588934803e-06, |
|
"loss": 0.9971, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.7314244291409931, |
|
"grad_norm": 1.8151755246614727, |
|
"learning_rate": 3.547158181890661e-06, |
|
"loss": 0.9291, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.7321493294671982, |
|
"grad_norm": 1.9292646278494385, |
|
"learning_rate": 3.529225402543518e-06, |
|
"loss": 1.0508, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.7328742297934034, |
|
"grad_norm": 1.9667040655974664, |
|
"learning_rate": 3.5113283499045846e-06, |
|
"loss": 1.0381, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.7335991301196085, |
|
"grad_norm": 2.490956546908503, |
|
"learning_rate": 3.4934671227878046e-06, |
|
"loss": 0.9943, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.7343240304458137, |
|
"grad_norm": 1.7574463836197582, |
|
"learning_rate": 3.475641819809327e-06, |
|
"loss": 0.9338, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.7350489307720188, |
|
"grad_norm": 2.8098119938777795, |
|
"learning_rate": 3.4578525393869444e-06, |
|
"loss": 1.0599, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.735773831098224, |
|
"grad_norm": 2.1547229502003877, |
|
"learning_rate": 3.4400993797395664e-06, |
|
"loss": 0.9071, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.7364987314244291, |
|
"grad_norm": 2.3529684580357673, |
|
"learning_rate": 3.42238243888668e-06, |
|
"loss": 0.8903, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.7372236317506343, |
|
"grad_norm": 3.3656369614826076, |
|
"learning_rate": 3.4047018146477873e-06, |
|
"loss": 0.9104, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.7379485320768394, |
|
"grad_norm": 2.3603322866432315, |
|
"learning_rate": 3.3870576046418825e-06, |
|
"loss": 0.9784, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.7386734324030446, |
|
"grad_norm": 2.321872140130776, |
|
"learning_rate": 3.369449906286907e-06, |
|
"loss": 1.0385, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.7393983327292497, |
|
"grad_norm": 2.1167495374440315, |
|
"learning_rate": 3.351878816799209e-06, |
|
"loss": 1.0955, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.7401232330554549, |
|
"grad_norm": 1.786896281579396, |
|
"learning_rate": 3.3343444331930207e-06, |
|
"loss": 0.991, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.74084813338166, |
|
"grad_norm": 2.2746758416106427, |
|
"learning_rate": 3.316846852279907e-06, |
|
"loss": 0.9766, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.7415730337078652, |
|
"grad_norm": 2.6138994872082097, |
|
"learning_rate": 3.2993861706682274e-06, |
|
"loss": 1.1535, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.7422979340340703, |
|
"grad_norm": 2.6165034172242176, |
|
"learning_rate": 3.281962484762622e-06, |
|
"loss": 0.9753, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.7430228343602755, |
|
"grad_norm": 1.9155784149193085, |
|
"learning_rate": 3.264575890763463e-06, |
|
"loss": 0.9675, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.7437477346864806, |
|
"grad_norm": 2.0624358920096215, |
|
"learning_rate": 3.247226484666339e-06, |
|
"loss": 1.0538, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.7444726350126858, |
|
"grad_norm": 3.5846085824449987, |
|
"learning_rate": 3.2299143622615036e-06, |
|
"loss": 1.107, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.7451975353388909, |
|
"grad_norm": 2.46962567912047, |
|
"learning_rate": 3.2126396191333643e-06, |
|
"loss": 0.9327, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.7459224356650961, |
|
"grad_norm": 2.4139413136075225, |
|
"learning_rate": 3.195402350659945e-06, |
|
"loss": 1.098, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.7466473359913012, |
|
"grad_norm": 2.017065330116569, |
|
"learning_rate": 3.178202652012363e-06, |
|
"loss": 0.9482, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.7473722363175064, |
|
"grad_norm": 2.404901697722625, |
|
"learning_rate": 3.161040618154315e-06, |
|
"loss": 1.0855, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.7480971366437115, |
|
"grad_norm": 1.8467098745774249, |
|
"learning_rate": 3.143916343841523e-06, |
|
"loss": 1.0062, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.7488220369699167, |
|
"grad_norm": 2.5314983525811883, |
|
"learning_rate": 3.1268299236212395e-06, |
|
"loss": 0.8812, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.7495469372961218, |
|
"grad_norm": 2.3038281210461697, |
|
"learning_rate": 3.1097814518317127e-06, |
|
"loss": 1.0849, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.750271837622327, |
|
"grad_norm": 2.4284883217355846, |
|
"learning_rate": 3.092771022601676e-06, |
|
"loss": 1.0175, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.750271837622327, |
|
"eval_loss": 1.7681310176849365, |
|
"eval_runtime": 1124.5222, |
|
"eval_samples_per_second": 15.699, |
|
"eval_steps_per_second": 0.245, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.7509967379485321, |
|
"grad_norm": 2.1284406550026986, |
|
"learning_rate": 3.0757987298498106e-06, |
|
"loss": 1.0076, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.7517216382747373, |
|
"grad_norm": 2.22165220425758, |
|
"learning_rate": 3.0588646672842404e-06, |
|
"loss": 0.951, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.7524465386009423, |
|
"grad_norm": 2.5711481889523804, |
|
"learning_rate": 3.0419689284020126e-06, |
|
"loss": 1.0273, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.7531714389271476, |
|
"grad_norm": 2.0814034949241202, |
|
"learning_rate": 3.0251116064885767e-06, |
|
"loss": 0.8638, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.7538963392533526, |
|
"grad_norm": 2.3755493898944664, |
|
"learning_rate": 3.0082927946172837e-06, |
|
"loss": 0.9276, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.7546212395795578, |
|
"grad_norm": 1.860045261304839, |
|
"learning_rate": 2.9915125856488424e-06, |
|
"loss": 0.8793, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.7553461399057629, |
|
"grad_norm": 2.166915926531366, |
|
"learning_rate": 2.974771072230842e-06, |
|
"loss": 1.035, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.7560710402319681, |
|
"grad_norm": 2.1712284867002745, |
|
"learning_rate": 2.958068346797217e-06, |
|
"loss": 0.9518, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.7567959405581732, |
|
"grad_norm": 2.888919414338719, |
|
"learning_rate": 2.941404501567743e-06, |
|
"loss": 1.0718, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.7575208408843784, |
|
"grad_norm": 2.230289529665858, |
|
"learning_rate": 2.9247796285475362e-06, |
|
"loss": 1.0045, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.7582457412105835, |
|
"grad_norm": 2.4680966008183534, |
|
"learning_rate": 2.90819381952653e-06, |
|
"loss": 1.1622, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.7589706415367887, |
|
"grad_norm": 2.965957230775603, |
|
"learning_rate": 2.8916471660789813e-06, |
|
"loss": 0.9385, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.7596955418629938, |
|
"grad_norm": 2.3702554177333104, |
|
"learning_rate": 2.875139759562954e-06, |
|
"loss": 0.9725, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.760420442189199, |
|
"grad_norm": 2.1238051733926113, |
|
"learning_rate": 2.8586716911198244e-06, |
|
"loss": 0.9945, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.7611453425154041, |
|
"grad_norm": 1.8445910518867181, |
|
"learning_rate": 2.8422430516737733e-06, |
|
"loss": 0.959, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7618702428416093, |
|
"grad_norm": 2.4547009955302177, |
|
"learning_rate": 2.825853931931283e-06, |
|
"loss": 0.9879, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.7625951431678144, |
|
"grad_norm": 2.002478666013241, |
|
"learning_rate": 2.80950442238064e-06, |
|
"loss": 0.9518, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.7633200434940196, |
|
"grad_norm": 2.3705241165870197, |
|
"learning_rate": 2.793194613291431e-06, |
|
"loss": 1.0771, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.7640449438202247, |
|
"grad_norm": 2.5129264862370957, |
|
"learning_rate": 2.7769245947140468e-06, |
|
"loss": 1.0603, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.7647698441464299, |
|
"grad_norm": 1.8777445092267904, |
|
"learning_rate": 2.7606944564791914e-06, |
|
"loss": 0.8863, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.765494744472635, |
|
"grad_norm": 2.0066823550902164, |
|
"learning_rate": 2.7445042881973727e-06, |
|
"loss": 0.9708, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.7662196447988402, |
|
"grad_norm": 1.9969442657507075, |
|
"learning_rate": 2.7283541792584165e-06, |
|
"loss": 1.0216, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.7669445451250453, |
|
"grad_norm": 2.043307797498135, |
|
"learning_rate": 2.712244218830973e-06, |
|
"loss": 1.0059, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.7676694454512505, |
|
"grad_norm": 2.05640044579258, |
|
"learning_rate": 2.6961744958620195e-06, |
|
"loss": 0.977, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.7683943457774556, |
|
"grad_norm": 2.0705940087415353, |
|
"learning_rate": 2.6801450990763767e-06, |
|
"loss": 1.0993, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.7691192461036608, |
|
"grad_norm": 2.2801073634969757, |
|
"learning_rate": 2.664156116976212e-06, |
|
"loss": 1.1062, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.7698441464298659, |
|
"grad_norm": 1.8809467398522293, |
|
"learning_rate": 2.648207637840554e-06, |
|
"loss": 0.9475, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.7705690467560711, |
|
"grad_norm": 1.9284658912112769, |
|
"learning_rate": 2.6322997497248036e-06, |
|
"loss": 0.8972, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.7712939470822762, |
|
"grad_norm": 2.1543128958058384, |
|
"learning_rate": 2.616432540460255e-06, |
|
"loss": 0.9557, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.7720188474084814, |
|
"grad_norm": 1.9290768627484227, |
|
"learning_rate": 2.6006060976535974e-06, |
|
"loss": 0.9678, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.7727437477346865, |
|
"grad_norm": 3.3836187115327174, |
|
"learning_rate": 2.58482050868644e-06, |
|
"loss": 1.0705, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.7734686480608917, |
|
"grad_norm": 2.379431503328967, |
|
"learning_rate": 2.5690758607148304e-06, |
|
"loss": 1.034, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.7741935483870968, |
|
"grad_norm": 2.1978701615866667, |
|
"learning_rate": 2.553372240668769e-06, |
|
"loss": 0.9683, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.774918448713302, |
|
"grad_norm": 2.334219947775647, |
|
"learning_rate": 2.5377097352517332e-06, |
|
"loss": 0.9329, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.775643349039507, |
|
"grad_norm": 1.8982866415446444, |
|
"learning_rate": 2.522088430940195e-06, |
|
"loss": 0.843, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.7763682493657122, |
|
"grad_norm": 1.8718401538017733, |
|
"learning_rate": 2.5065084139831443e-06, |
|
"loss": 0.872, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.7770931496919173, |
|
"grad_norm": 2.1329592476566663, |
|
"learning_rate": 2.4909697704016158e-06, |
|
"loss": 0.9499, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.7778180500181225, |
|
"grad_norm": 2.3116722160450855, |
|
"learning_rate": 2.4754725859882077e-06, |
|
"loss": 1.071, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.7785429503443276, |
|
"grad_norm": 2.411486503556151, |
|
"learning_rate": 2.4600169463066213e-06, |
|
"loss": 0.9443, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.7792678506705328, |
|
"grad_norm": 1.8955688908325088, |
|
"learning_rate": 2.4446029366911708e-06, |
|
"loss": 1.0264, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.7799927509967379, |
|
"grad_norm": 2.2466514583663972, |
|
"learning_rate": 2.4292306422463218e-06, |
|
"loss": 1.0286, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.7807176513229431, |
|
"grad_norm": 2.605174653703314, |
|
"learning_rate": 2.4139001478462233e-06, |
|
"loss": 1.0762, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.7814425516491482, |
|
"grad_norm": 2.125905431943264, |
|
"learning_rate": 2.3986115381342347e-06, |
|
"loss": 1.0855, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.7821674519753534, |
|
"grad_norm": 2.1131544647871685, |
|
"learning_rate": 2.3833648975224588e-06, |
|
"loss": 0.9731, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.7828923523015585, |
|
"grad_norm": 2.1666492164090068, |
|
"learning_rate": 2.3681603101912785e-06, |
|
"loss": 1.1026, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.7836172526277637, |
|
"grad_norm": 1.902794404555453, |
|
"learning_rate": 2.3529978600888915e-06, |
|
"loss": 0.9013, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.7843421529539688, |
|
"grad_norm": 1.904459392353737, |
|
"learning_rate": 2.3378776309308414e-06, |
|
"loss": 0.9706, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.785067053280174, |
|
"grad_norm": 1.6870600380072511, |
|
"learning_rate": 2.3227997061995622e-06, |
|
"loss": 0.8814, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.7857919536063791, |
|
"grad_norm": 2.034799823983302, |
|
"learning_rate": 2.307764169143919e-06, |
|
"loss": 0.9905, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.7865168539325843, |
|
"grad_norm": 2.148503285324599, |
|
"learning_rate": 2.292771102778739e-06, |
|
"loss": 1.0019, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.7872417542587894, |
|
"grad_norm": 1.8765819842880695, |
|
"learning_rate": 2.277820589884359e-06, |
|
"loss": 0.9529, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.7879666545849946, |
|
"grad_norm": 2.6372186463117666, |
|
"learning_rate": 2.26291271300617e-06, |
|
"loss": 0.9756, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.7886915549111997, |
|
"grad_norm": 2.129327741523149, |
|
"learning_rate": 2.248047554454157e-06, |
|
"loss": 0.9456, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.7894164552374049, |
|
"grad_norm": 1.9575868058617358, |
|
"learning_rate": 2.2332251963024475e-06, |
|
"loss": 0.9592, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.79014135556361, |
|
"grad_norm": 2.239457152716971, |
|
"learning_rate": 2.2184457203888598e-06, |
|
"loss": 1.0179, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.7908662558898152, |
|
"grad_norm": 2.0380730008597463, |
|
"learning_rate": 2.2037092083144463e-06, |
|
"loss": 1.0047, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.7915911562160203, |
|
"grad_norm": 2.651283946960534, |
|
"learning_rate": 2.1890157414430448e-06, |
|
"loss": 0.9971, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.7923160565422255, |
|
"grad_norm": 1.9224733064527182, |
|
"learning_rate": 2.1743654009008362e-06, |
|
"loss": 0.9758, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.7930409568684306, |
|
"grad_norm": 2.042210473646339, |
|
"learning_rate": 2.1597582675758833e-06, |
|
"loss": 0.9157, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.7937658571946358, |
|
"grad_norm": 1.8843195169105176, |
|
"learning_rate": 2.1451944221176966e-06, |
|
"loss": 0.8876, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.7944907575208409, |
|
"grad_norm": 1.7432937559280743, |
|
"learning_rate": 2.1306739449367796e-06, |
|
"loss": 0.966, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.7952156578470461, |
|
"grad_norm": 2.4904321442188073, |
|
"learning_rate": 2.116196916204193e-06, |
|
"loss": 1.0147, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.7959405581732512, |
|
"grad_norm": 1.867849742985621, |
|
"learning_rate": 2.1017634158511037e-06, |
|
"loss": 0.8396, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.7966654584994564, |
|
"grad_norm": 2.176037217510385, |
|
"learning_rate": 2.0873735235683535e-06, |
|
"loss": 1.0433, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.7973903588256614, |
|
"grad_norm": 1.8093979197769543, |
|
"learning_rate": 2.073027318806008e-06, |
|
"loss": 1.0163, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7981152591518667, |
|
"grad_norm": 2.317013372213296, |
|
"learning_rate": 2.058724880772929e-06, |
|
"loss": 0.934, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.7988401594780717, |
|
"grad_norm": 1.8950042969585994, |
|
"learning_rate": 2.044466288436322e-06, |
|
"loss": 0.9235, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.799565059804277, |
|
"grad_norm": 2.26981624646591, |
|
"learning_rate": 2.030251620521325e-06, |
|
"loss": 0.9664, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.800289960130482, |
|
"grad_norm": 1.9558043676284143, |
|
"learning_rate": 2.0160809555105467e-06, |
|
"loss": 1.0178, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.8010148604566872, |
|
"grad_norm": 2.919669591456051, |
|
"learning_rate": 2.0019543716436474e-06, |
|
"loss": 0.9538, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.8017397607828923, |
|
"grad_norm": 2.0335272734209924, |
|
"learning_rate": 1.9878719469169104e-06, |
|
"loss": 0.8791, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.8024646611090975, |
|
"grad_norm": 2.171132136552404, |
|
"learning_rate": 1.9738337590827918e-06, |
|
"loss": 0.9331, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.8031895614353026, |
|
"grad_norm": 1.942377740059999, |
|
"learning_rate": 1.9598398856495227e-06, |
|
"loss": 0.8755, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.8039144617615078, |
|
"grad_norm": 2.135810469663323, |
|
"learning_rate": 1.9458904038806537e-06, |
|
"loss": 1.0203, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.8046393620877129, |
|
"grad_norm": 2.3944483070855362, |
|
"learning_rate": 1.9319853907946393e-06, |
|
"loss": 0.9595, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.8053642624139181, |
|
"grad_norm": 1.9622747573911234, |
|
"learning_rate": 1.918124923164414e-06, |
|
"loss": 0.9016, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.8060891627401232, |
|
"grad_norm": 2.703351031078299, |
|
"learning_rate": 1.9043090775169604e-06, |
|
"loss": 1.0531, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.8068140630663284, |
|
"grad_norm": 2.1339961008284063, |
|
"learning_rate": 1.890537930132903e-06, |
|
"loss": 1.0974, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.8075389633925335, |
|
"grad_norm": 2.6633749557025896, |
|
"learning_rate": 1.8768115570460677e-06, |
|
"loss": 1.0197, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.8082638637187387, |
|
"grad_norm": 1.7899870312478274, |
|
"learning_rate": 1.8631300340430747e-06, |
|
"loss": 1.0164, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.8089887640449438, |
|
"grad_norm": 1.786288489816615, |
|
"learning_rate": 1.8494934366629057e-06, |
|
"loss": 1.0085, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.809713664371149, |
|
"grad_norm": 2.213791559175714, |
|
"learning_rate": 1.8359018401965156e-06, |
|
"loss": 1.0711, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.8104385646973541, |
|
"grad_norm": 2.227118578843932, |
|
"learning_rate": 1.8223553196863852e-06, |
|
"loss": 1.0198, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.8111634650235593, |
|
"grad_norm": 1.9847568274305198, |
|
"learning_rate": 1.808853949926125e-06, |
|
"loss": 0.9065, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.8118883653497644, |
|
"grad_norm": 1.9370018576926253, |
|
"learning_rate": 1.795397805460053e-06, |
|
"loss": 0.9994, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.8126132656759696, |
|
"grad_norm": 2.2601268831652424, |
|
"learning_rate": 1.781986960582789e-06, |
|
"loss": 0.9872, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.8133381660021747, |
|
"grad_norm": 2.3214803090685265, |
|
"learning_rate": 1.7686214893388509e-06, |
|
"loss": 0.9926, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.8140630663283799, |
|
"grad_norm": 2.5777964336742647, |
|
"learning_rate": 1.7553014655222255e-06, |
|
"loss": 0.9657, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.814787966654585, |
|
"grad_norm": 1.9895856551523463, |
|
"learning_rate": 1.7420269626759822e-06, |
|
"loss": 0.8018, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.8155128669807902, |
|
"grad_norm": 2.071389074929151, |
|
"learning_rate": 1.7287980540918548e-06, |
|
"loss": 1.1168, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.8162377673069953, |
|
"grad_norm": 2.3768284586683786, |
|
"learning_rate": 1.7156148128098337e-06, |
|
"loss": 1.0561, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.8169626676332005, |
|
"grad_norm": 2.108829554647349, |
|
"learning_rate": 1.7024773116177839e-06, |
|
"loss": 1.0184, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.8176875679594056, |
|
"grad_norm": 1.9145136763918424, |
|
"learning_rate": 1.6893856230510152e-06, |
|
"loss": 1.0269, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.8184124682856108, |
|
"grad_norm": 1.984940928437078, |
|
"learning_rate": 1.6763398193919034e-06, |
|
"loss": 0.9718, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.8191373686118159, |
|
"grad_norm": 2.491917383583085, |
|
"learning_rate": 1.6633399726694755e-06, |
|
"loss": 0.9837, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.819862268938021, |
|
"grad_norm": 2.1889459447102295, |
|
"learning_rate": 1.6503861546590216e-06, |
|
"loss": 0.8859, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.8205871692642261, |
|
"grad_norm": 2.2855735715808807, |
|
"learning_rate": 1.6374784368817032e-06, |
|
"loss": 0.8568, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.8213120695904313, |
|
"grad_norm": 2.4119276770093245, |
|
"learning_rate": 1.6246168906041405e-06, |
|
"loss": 1.0357, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.8220369699166364, |
|
"grad_norm": 2.2497941042194207, |
|
"learning_rate": 1.6118015868380387e-06, |
|
"loss": 1.0791, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.8227618702428416, |
|
"grad_norm": 2.6543479685385036, |
|
"learning_rate": 1.5990325963397779e-06, |
|
"loss": 1.0276, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.8234867705690467, |
|
"grad_norm": 1.9249733038058623, |
|
"learning_rate": 1.5863099896100343e-06, |
|
"loss": 0.966, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.8242116708952519, |
|
"grad_norm": 2.2761621468500497, |
|
"learning_rate": 1.5736338368933978e-06, |
|
"loss": 0.8657, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.824936571221457, |
|
"grad_norm": 1.9374428897603755, |
|
"learning_rate": 1.561004208177962e-06, |
|
"loss": 1.0411, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.8256614715476622, |
|
"grad_norm": 2.13221264820333, |
|
"learning_rate": 1.5484211731949572e-06, |
|
"loss": 1.0386, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.8263863718738673, |
|
"grad_norm": 2.1816254648738194, |
|
"learning_rate": 1.5358848014183547e-06, |
|
"loss": 1.0673, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.8271112722000725, |
|
"grad_norm": 2.333905932739388, |
|
"learning_rate": 1.523395162064486e-06, |
|
"loss": 0.8971, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.8278361725262776, |
|
"grad_norm": 1.9732510886779988, |
|
"learning_rate": 1.5109523240916702e-06, |
|
"loss": 0.9907, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.8285610728524828, |
|
"grad_norm": 2.0768050361793606, |
|
"learning_rate": 1.4985563561998185e-06, |
|
"loss": 1.0126, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.8292859731786879, |
|
"grad_norm": 2.0071930948425933, |
|
"learning_rate": 1.4862073268300558e-06, |
|
"loss": 0.9991, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.8300108735048931, |
|
"grad_norm": 2.0782467893964593, |
|
"learning_rate": 1.4739053041643536e-06, |
|
"loss": 0.9887, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.8307357738310982, |
|
"grad_norm": 2.171294999994677, |
|
"learning_rate": 1.4616503561251538e-06, |
|
"loss": 0.9402, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.8314606741573034, |
|
"grad_norm": 2.407000985423698, |
|
"learning_rate": 1.4494425503749788e-06, |
|
"loss": 0.981, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.8321855744835085, |
|
"grad_norm": 1.9223688184324994, |
|
"learning_rate": 1.437281954316071e-06, |
|
"loss": 0.9358, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.8329104748097137, |
|
"grad_norm": 1.9968819662132635, |
|
"learning_rate": 1.4251686350900152e-06, |
|
"loss": 0.9298, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.8336353751359188, |
|
"grad_norm": 2.136971059899701, |
|
"learning_rate": 1.4131026595773689e-06, |
|
"loss": 0.9215, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.834360275462124, |
|
"grad_norm": 1.8337681676570194, |
|
"learning_rate": 1.4010840943972982e-06, |
|
"loss": 1.0268, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.8350851757883291, |
|
"grad_norm": 2.1977568849453744, |
|
"learning_rate": 1.3891130059072032e-06, |
|
"loss": 0.9156, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.8358100761145343, |
|
"grad_norm": 2.255344231343079, |
|
"learning_rate": 1.3771894602023483e-06, |
|
"loss": 1.087, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.8365349764407394, |
|
"grad_norm": 2.2385193595399606, |
|
"learning_rate": 1.3653135231155079e-06, |
|
"loss": 1.0131, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.8372598767669446, |
|
"grad_norm": 1.9074930266008068, |
|
"learning_rate": 1.353485260216596e-06, |
|
"loss": 1.046, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.8379847770931497, |
|
"grad_norm": 2.3457699506131124, |
|
"learning_rate": 1.3417047368123103e-06, |
|
"loss": 1.0063, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.8387096774193549, |
|
"grad_norm": 2.195479709372169, |
|
"learning_rate": 1.329972017945761e-06, |
|
"loss": 0.9382, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.83943457774556, |
|
"grad_norm": 2.815541285577747, |
|
"learning_rate": 1.3182871683961217e-06, |
|
"loss": 0.9169, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.8401594780717652, |
|
"grad_norm": 2.473680572699823, |
|
"learning_rate": 1.3066502526782653e-06, |
|
"loss": 0.984, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.8408843783979703, |
|
"grad_norm": 2.3309597371635995, |
|
"learning_rate": 1.2950613350424113e-06, |
|
"loss": 0.9095, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.8416092787241755, |
|
"grad_norm": 2.6521093626394148, |
|
"learning_rate": 1.2835204794737755e-06, |
|
"loss": 0.9806, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.8423341790503805, |
|
"grad_norm": 2.2183368928655134, |
|
"learning_rate": 1.272027749692203e-06, |
|
"loss": 1.0063, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.8430590793765858, |
|
"grad_norm": 2.335358593922603, |
|
"learning_rate": 1.2605832091518277e-06, |
|
"loss": 1.0009, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.8437839797027908, |
|
"grad_norm": 3.1347973939628586, |
|
"learning_rate": 1.2491869210407215e-06, |
|
"loss": 1.0215, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.844508880028996, |
|
"grad_norm": 2.2148770630869667, |
|
"learning_rate": 1.2378389482805397e-06, |
|
"loss": 0.9492, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.8452337803552011, |
|
"grad_norm": 1.8250263747397901, |
|
"learning_rate": 1.2265393535261827e-06, |
|
"loss": 0.9507, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.8459586806814063, |
|
"grad_norm": 2.5651175153179806, |
|
"learning_rate": 1.2152881991654364e-06, |
|
"loss": 1.0011, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.8466835810076114, |
|
"grad_norm": 1.9418141541403875, |
|
"learning_rate": 1.204085547318642e-06, |
|
"loss": 1.0761, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.8474084813338166, |
|
"grad_norm": 2.0646032693286833, |
|
"learning_rate": 1.1929314598383423e-06, |
|
"loss": 0.9781, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.8481333816600217, |
|
"grad_norm": 1.8004575813318966, |
|
"learning_rate": 1.1818259983089452e-06, |
|
"loss": 0.9592, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.8488582819862269, |
|
"grad_norm": 2.589648686714714, |
|
"learning_rate": 1.1707692240463876e-06, |
|
"loss": 1.1313, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.849583182312432, |
|
"grad_norm": 2.0114408256271075, |
|
"learning_rate": 1.1597611980977853e-06, |
|
"loss": 0.9651, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.8503080826386372, |
|
"grad_norm": 1.7560110108173013, |
|
"learning_rate": 1.1488019812411067e-06, |
|
"loss": 0.969, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.8510329829648423, |
|
"grad_norm": 2.0223370350736958, |
|
"learning_rate": 1.1378916339848323e-06, |
|
"loss": 0.896, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.8517578832910475, |
|
"grad_norm": 2.2941607223768075, |
|
"learning_rate": 1.127030216567625e-06, |
|
"loss": 1.1139, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.8524827836172526, |
|
"grad_norm": 1.7519205543151821, |
|
"learning_rate": 1.1162177889579906e-06, |
|
"loss": 0.9145, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.8532076839434578, |
|
"grad_norm": 2.0778773139552094, |
|
"learning_rate": 1.1054544108539511e-06, |
|
"loss": 0.8809, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.8539325842696629, |
|
"grad_norm": 1.8224740916196427, |
|
"learning_rate": 1.0947401416827152e-06, |
|
"loss": 0.9089, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.8546574845958681, |
|
"grad_norm": 3.369653427250154, |
|
"learning_rate": 1.0840750406003486e-06, |
|
"loss": 0.9113, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.8553823849220732, |
|
"grad_norm": 3.0341804857536254, |
|
"learning_rate": 1.0734591664914463e-06, |
|
"loss": 1.0581, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.8561072852482784, |
|
"grad_norm": 2.176890160993487, |
|
"learning_rate": 1.0628925779688148e-06, |
|
"loss": 0.8511, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.8568321855744835, |
|
"grad_norm": 2.505948283667956, |
|
"learning_rate": 1.0523753333731357e-06, |
|
"loss": 1.023, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.8575570859006887, |
|
"grad_norm": 2.8808186692504423, |
|
"learning_rate": 1.041907490772658e-06, |
|
"loss": 0.9036, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.8582819862268938, |
|
"grad_norm": 2.378412246151113, |
|
"learning_rate": 1.031489107962863e-06, |
|
"loss": 0.951, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.859006886553099, |
|
"grad_norm": 2.4071476437909594, |
|
"learning_rate": 1.0211202424661604e-06, |
|
"loss": 0.9586, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.8597317868793041, |
|
"grad_norm": 2.6658079886381785, |
|
"learning_rate": 1.0108009515315586e-06, |
|
"loss": 1.1094, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.8604566872055093, |
|
"grad_norm": 2.2215584462527596, |
|
"learning_rate": 1.0005312921343523e-06, |
|
"loss": 0.8504, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.8611815875317144, |
|
"grad_norm": 2.0187808429022107, |
|
"learning_rate": 9.903113209758098e-07, |
|
"loss": 0.9252, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.8619064878579196, |
|
"grad_norm": 2.4590642711919006, |
|
"learning_rate": 9.801410944828572e-07, |
|
"loss": 0.9505, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.8626313881841247, |
|
"grad_norm": 1.9067553771838575, |
|
"learning_rate": 9.700206688077707e-07, |
|
"loss": 0.9373, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.8633562885103299, |
|
"grad_norm": 2.206921164430631, |
|
"learning_rate": 9.599500998278632e-07, |
|
"loss": 1.0474, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.864081188836535, |
|
"grad_norm": 2.2333129821917095, |
|
"learning_rate": 9.499294431451755e-07, |
|
"loss": 1.0328, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.8648060891627402, |
|
"grad_norm": 2.482019605419004, |
|
"learning_rate": 9.399587540861721e-07, |
|
"loss": 0.9418, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.8655309894889452, |
|
"grad_norm": 1.998307923726419, |
|
"learning_rate": 9.300380877014315e-07, |
|
"loss": 0.979, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.8662558898151504, |
|
"grad_norm": 1.9225932853992822, |
|
"learning_rate": 9.201674987653509e-07, |
|
"loss": 0.9395, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.8669807901413555, |
|
"grad_norm": 2.9091365349477796, |
|
"learning_rate": 9.10347041775832e-07, |
|
"loss": 0.9469, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.8677056904675607, |
|
"grad_norm": 1.9149962880721147, |
|
"learning_rate": 9.00576770953987e-07, |
|
"loss": 1.0476, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.8684305907937658, |
|
"grad_norm": 1.9887916713607694, |
|
"learning_rate": 8.908567402438417e-07, |
|
"loss": 0.8714, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.869155491119971, |
|
"grad_norm": 2.245816854601276, |
|
"learning_rate": 8.811870033120284e-07, |
|
"loss": 0.9797, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.8698803914461761, |
|
"grad_norm": 2.2713836769286595, |
|
"learning_rate": 8.715676135475004e-07, |
|
"loss": 0.9377, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8706052917723813, |
|
"grad_norm": 2.5205170405942905, |
|
"learning_rate": 8.6199862406123e-07, |
|
"loss": 0.9551, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.8713301920985864, |
|
"grad_norm": 2.35062405333527, |
|
"learning_rate": 8.524800876859162e-07, |
|
"loss": 0.997, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.8720550924247916, |
|
"grad_norm": 2.214767113139861, |
|
"learning_rate": 8.430120569756949e-07, |
|
"loss": 0.9209, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.8727799927509967, |
|
"grad_norm": 1.8210526020082338, |
|
"learning_rate": 8.335945842058524e-07, |
|
"loss": 0.8505, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.8735048930772019, |
|
"grad_norm": 3.169940456082299, |
|
"learning_rate": 8.242277213725258e-07, |
|
"loss": 1.0918, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.874229793403407, |
|
"grad_norm": 2.466935646079598, |
|
"learning_rate": 8.149115201924263e-07, |
|
"loss": 0.9271, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.8749546937296122, |
|
"grad_norm": 1.8036637263972901, |
|
"learning_rate": 8.056460321025483e-07, |
|
"loss": 0.9608, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.8756795940558173, |
|
"grad_norm": 1.9639970375097486, |
|
"learning_rate": 7.964313082598884e-07, |
|
"loss": 0.863, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.8764044943820225, |
|
"grad_norm": 1.8756276620751722, |
|
"learning_rate": 7.872673995411606e-07, |
|
"loss": 0.8938, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.8771293947082276, |
|
"grad_norm": 2.2245625435715577, |
|
"learning_rate": 7.781543565425153e-07, |
|
"loss": 0.8795, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.8778542950344328, |
|
"grad_norm": 2.5233957402711105, |
|
"learning_rate": 7.690922295792647e-07, |
|
"loss": 0.9564, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.8785791953606379, |
|
"grad_norm": 2.325154347559063, |
|
"learning_rate": 7.600810686855975e-07, |
|
"loss": 0.9919, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.8793040956868431, |
|
"grad_norm": 2.5256436615222437, |
|
"learning_rate": 7.511209236143047e-07, |
|
"loss": 1.0318, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.8800289960130482, |
|
"grad_norm": 2.709568964312284, |
|
"learning_rate": 7.422118438365156e-07, |
|
"loss": 1.0083, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.8807538963392534, |
|
"grad_norm": 2.2518723793524384, |
|
"learning_rate": 7.333538785414062e-07, |
|
"loss": 0.894, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.8814787966654585, |
|
"grad_norm": 2.443491219954294, |
|
"learning_rate": 7.24547076635942e-07, |
|
"loss": 0.9463, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.8822036969916637, |
|
"grad_norm": 2.134388443555549, |
|
"learning_rate": 7.157914867446003e-07, |
|
"loss": 0.933, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.8829285973178688, |
|
"grad_norm": 2.512674514907188, |
|
"learning_rate": 7.070871572091076e-07, |
|
"loss": 0.893, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.883653497644074, |
|
"grad_norm": 2.453936234251306, |
|
"learning_rate": 6.984341360881664e-07, |
|
"loss": 0.9721, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.8843783979702791, |
|
"grad_norm": 2.1489162189409337, |
|
"learning_rate": 6.898324711571958e-07, |
|
"loss": 0.9212, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.8851032982964843, |
|
"grad_norm": 2.510668864545298, |
|
"learning_rate": 6.812822099080629e-07, |
|
"loss": 0.9371, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.8858281986226894, |
|
"grad_norm": 1.9545987636324476, |
|
"learning_rate": 6.727833995488242e-07, |
|
"loss": 0.9738, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.8865530989488946, |
|
"grad_norm": 2.1706699023503604, |
|
"learning_rate": 6.64336087003461e-07, |
|
"loss": 1.1229, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.8872779992750996, |
|
"grad_norm": 1.8682535430546066, |
|
"learning_rate": 6.559403189116275e-07, |
|
"loss": 0.8849, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.8880028996013049, |
|
"grad_norm": 2.159689783926262, |
|
"learning_rate": 6.475961416283838e-07, |
|
"loss": 0.9401, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.8887277999275099, |
|
"grad_norm": 2.7767173791052913, |
|
"learning_rate": 6.393036012239473e-07, |
|
"loss": 0.8924, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.8894527002537151, |
|
"grad_norm": 2.4204337218238905, |
|
"learning_rate": 6.310627434834349e-07, |
|
"loss": 0.9524, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.8901776005799202, |
|
"grad_norm": 1.8765448611106061, |
|
"learning_rate": 6.228736139066105e-07, |
|
"loss": 0.859, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.8909025009061254, |
|
"grad_norm": 1.919204528359367, |
|
"learning_rate": 6.147362577076343e-07, |
|
"loss": 0.8362, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.8916274012323305, |
|
"grad_norm": 1.9433910718170642, |
|
"learning_rate": 6.066507198148142e-07, |
|
"loss": 0.8749, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.8923523015585357, |
|
"grad_norm": 2.9697875554755884, |
|
"learning_rate": 5.986170448703577e-07, |
|
"loss": 0.943, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.8930772018847408, |
|
"grad_norm": 2.353044622771824, |
|
"learning_rate": 5.906352772301193e-07, |
|
"loss": 1.1478, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.893802102210946, |
|
"grad_norm": 1.850720536456356, |
|
"learning_rate": 5.827054609633686e-07, |
|
"loss": 0.9644, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.8945270025371511, |
|
"grad_norm": 1.8013108857133973, |
|
"learning_rate": 5.748276398525332e-07, |
|
"loss": 1.0246, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.8952519028633563, |
|
"grad_norm": 2.469890038185596, |
|
"learning_rate": 5.670018573929647e-07, |
|
"loss": 0.8624, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.8959768031895614, |
|
"grad_norm": 2.2167116711922175, |
|
"learning_rate": 5.592281567926983e-07, |
|
"loss": 0.9879, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.8967017035157666, |
|
"grad_norm": 2.4449293680021236, |
|
"learning_rate": 5.515065809722064e-07, |
|
"loss": 0.9898, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.8974266038419717, |
|
"grad_norm": 2.746110084978734, |
|
"learning_rate": 5.438371725641778e-07, |
|
"loss": 1.0036, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.8981515041681769, |
|
"grad_norm": 2.4639823270799983, |
|
"learning_rate": 5.362199739132656e-07, |
|
"loss": 1.0811, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.898876404494382, |
|
"grad_norm": 1.9704675962545042, |
|
"learning_rate": 5.286550270758617e-07, |
|
"loss": 0.8005, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.8996013048205872, |
|
"grad_norm": 2.2146310697220546, |
|
"learning_rate": 5.211423738198618e-07, |
|
"loss": 0.9509, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.9003262051467923, |
|
"grad_norm": 1.8720450871060204, |
|
"learning_rate": 5.136820556244393e-07, |
|
"loss": 0.9735, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.9010511054729975, |
|
"grad_norm": 1.8982694817893693, |
|
"learning_rate": 5.062741136798111e-07, |
|
"loss": 0.9381, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.9017760057992026, |
|
"grad_norm": 1.858714766448879, |
|
"learning_rate": 4.98918588887013e-07, |
|
"loss": 0.9523, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.9025009061254078, |
|
"grad_norm": 2.4016041132668344, |
|
"learning_rate": 4.916155218576734e-07, |
|
"loss": 1.0073, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.9032258064516129, |
|
"grad_norm": 2.347954663659916, |
|
"learning_rate": 4.843649529137861e-07, |
|
"loss": 1.0266, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.9039507067778181, |
|
"grad_norm": 2.1409896869995193, |
|
"learning_rate": 4.771669220874908e-07, |
|
"loss": 0.9691, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.9046756071040232, |
|
"grad_norm": 2.49959014733721, |
|
"learning_rate": 4.70021469120856e-07, |
|
"loss": 1.0186, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.9054005074302284, |
|
"grad_norm": 2.1558155360425513, |
|
"learning_rate": 4.629286334656502e-07, |
|
"loss": 0.9444, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.9061254077564335, |
|
"grad_norm": 1.6872092574160515, |
|
"learning_rate": 4.55888454283131e-07, |
|
"loss": 0.8999, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.9068503080826387, |
|
"grad_norm": 2.4896678483642445, |
|
"learning_rate": 4.489009704438274e-07, |
|
"loss": 0.9269, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.9075752084088438, |
|
"grad_norm": 2.2948377479813344, |
|
"learning_rate": 4.419662205273234e-07, |
|
"loss": 1.0014, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.908300108735049, |
|
"grad_norm": 2.287777069195488, |
|
"learning_rate": 4.35084242822047e-07, |
|
"loss": 0.9587, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.909025009061254, |
|
"grad_norm": 2.32897276572487, |
|
"learning_rate": 4.282550753250603e-07, |
|
"loss": 0.9474, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.9097499093874593, |
|
"grad_norm": 2.314915920284867, |
|
"learning_rate": 4.2147875574184095e-07, |
|
"loss": 0.981, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.9104748097136643, |
|
"grad_norm": 1.9516096065203952, |
|
"learning_rate": 4.1475532148608665e-07, |
|
"loss": 0.9547, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.9111997100398695, |
|
"grad_norm": 2.1927356687145227, |
|
"learning_rate": 4.08084809679502e-07, |
|
"loss": 0.9606, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.9119246103660746, |
|
"grad_norm": 2.0841597477886924, |
|
"learning_rate": 4.0146725715159096e-07, |
|
"loss": 0.9804, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.9126495106922798, |
|
"grad_norm": 2.5613066364600914, |
|
"learning_rate": 3.9490270043945787e-07, |
|
"loss": 1.0065, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.9133744110184849, |
|
"grad_norm": 2.4125648530924497, |
|
"learning_rate": 3.883911757876058e-07, |
|
"loss": 0.9775, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.9140993113446901, |
|
"grad_norm": 1.939270842406947, |
|
"learning_rate": 3.8193271914773287e-07, |
|
"loss": 0.9695, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.9148242116708952, |
|
"grad_norm": 2.2535052888130336, |
|
"learning_rate": 3.7552736617853745e-07, |
|
"loss": 0.8691, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.9155491119971004, |
|
"grad_norm": 2.06134443104641, |
|
"learning_rate": 3.6917515224552115e-07, |
|
"loss": 0.9972, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.9162740123233055, |
|
"grad_norm": 2.187718479642105, |
|
"learning_rate": 3.628761124207858e-07, |
|
"loss": 0.9614, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.9169989126495107, |
|
"grad_norm": 2.728957931329134, |
|
"learning_rate": 3.566302814828526e-07, |
|
"loss": 1.0619, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.9177238129757158, |
|
"grad_norm": 2.295000346358395, |
|
"learning_rate": 3.504376939164611e-07, |
|
"loss": 1.0473, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.918448713301921, |
|
"grad_norm": 2.077400704239704, |
|
"learning_rate": 3.442983839123826e-07, |
|
"loss": 1.0098, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.9191736136281261, |
|
"grad_norm": 2.3680349659705016, |
|
"learning_rate": 3.382123853672303e-07, |
|
"loss": 1.0091, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.9198985139543313, |
|
"grad_norm": 2.392219243933128, |
|
"learning_rate": 3.321797318832687e-07, |
|
"loss": 1.0328, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.9206234142805364, |
|
"grad_norm": 1.933874299943873, |
|
"learning_rate": 3.262004567682342e-07, |
|
"loss": 0.9634, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.9213483146067416, |
|
"grad_norm": 2.1317697728223997, |
|
"learning_rate": 3.20274593035147e-07, |
|
"loss": 0.9766, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.9220732149329467, |
|
"grad_norm": 1.7882374391836628, |
|
"learning_rate": 3.144021734021352e-07, |
|
"loss": 0.9166, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.9227981152591519, |
|
"grad_norm": 2.053906284134757, |
|
"learning_rate": 3.085832302922398e-07, |
|
"loss": 0.9921, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.923523015585357, |
|
"grad_norm": 2.130994971696479, |
|
"learning_rate": 3.028177958332512e-07, |
|
"loss": 0.9309, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.9242479159115622, |
|
"grad_norm": 2.070911861584657, |
|
"learning_rate": 2.971059018575262e-07, |
|
"loss": 0.9546, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.9249728162377673, |
|
"grad_norm": 2.180354582475891, |
|
"learning_rate": 2.9144757990180685e-07, |
|
"loss": 0.9385, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.9256977165639725, |
|
"grad_norm": 2.101379134926334, |
|
"learning_rate": 2.858428612070585e-07, |
|
"loss": 0.9473, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.9264226168901776, |
|
"grad_norm": 2.941363671739184, |
|
"learning_rate": 2.802917767182822e-07, |
|
"loss": 1.0476, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.9271475172163828, |
|
"grad_norm": 2.099272282487294, |
|
"learning_rate": 2.74794357084357e-07, |
|
"loss": 1.135, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.9278724175425879, |
|
"grad_norm": 2.0386416225655744, |
|
"learning_rate": 2.693506326578632e-07, |
|
"loss": 0.9461, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.9285973178687931, |
|
"grad_norm": 2.158374614724584, |
|
"learning_rate": 2.639606334949163e-07, |
|
"loss": 0.8921, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.9293222181949982, |
|
"grad_norm": 1.8765718966068676, |
|
"learning_rate": 2.586243893550033e-07, |
|
"loss": 0.9305, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.9300471185212034, |
|
"grad_norm": 2.3160935821533144, |
|
"learning_rate": 2.533419297008155e-07, |
|
"loss": 0.9176, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.9307720188474085, |
|
"grad_norm": 2.1878330344349313, |
|
"learning_rate": 2.481132836980871e-07, |
|
"loss": 0.955, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.9314969191736137, |
|
"grad_norm": 2.195724525034424, |
|
"learning_rate": 2.4293848021543443e-07, |
|
"loss": 0.8916, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.9322218194998187, |
|
"grad_norm": 1.7464619703681252, |
|
"learning_rate": 2.378175478241984e-07, |
|
"loss": 0.9268, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.932946719826024, |
|
"grad_norm": 2.5869309705425465, |
|
"learning_rate": 2.3275051479828115e-07, |
|
"loss": 0.9683, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.933671620152229, |
|
"grad_norm": 1.9334780055162688, |
|
"learning_rate": 2.27737409113995e-07, |
|
"loss": 0.9208, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.9343965204784342, |
|
"grad_norm": 1.8929021569949063, |
|
"learning_rate": 2.2277825844990607e-07, |
|
"loss": 0.912, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.9351214208046393, |
|
"grad_norm": 2.2127598690640564, |
|
"learning_rate": 2.1787309018668213e-07, |
|
"loss": 1.0383, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.9358463211308445, |
|
"grad_norm": 2.210748449295597, |
|
"learning_rate": 2.130219314069415e-07, |
|
"loss": 0.9721, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.9365712214570496, |
|
"grad_norm": 2.115265811779472, |
|
"learning_rate": 2.0822480889510222e-07, |
|
"loss": 1.0026, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.9372961217832548, |
|
"grad_norm": 2.040211532276065, |
|
"learning_rate": 2.034817491372354e-07, |
|
"loss": 0.9751, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.9380210221094599, |
|
"grad_norm": 2.3955059923006483, |
|
"learning_rate": 1.9879277832091758e-07, |
|
"loss": 1.0528, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.9387459224356651, |
|
"grad_norm": 2.0551722313954297, |
|
"learning_rate": 1.941579223350898e-07, |
|
"loss": 0.9919, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.9394708227618702, |
|
"grad_norm": 1.8511261527636662, |
|
"learning_rate": 1.8957720676991087e-07, |
|
"loss": 0.957, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.9401957230880754, |
|
"grad_norm": 1.986865247006612, |
|
"learning_rate": 1.850506569166155e-07, |
|
"loss": 1.0243, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.9409206234142805, |
|
"grad_norm": 1.9332096525646203, |
|
"learning_rate": 1.8057829776737867e-07, |
|
"loss": 1.0149, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.9416455237404857, |
|
"grad_norm": 2.0914992940185724, |
|
"learning_rate": 1.761601540151736e-07, |
|
"loss": 1.0177, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.9423704240666908, |
|
"grad_norm": 2.4468777213087205, |
|
"learning_rate": 1.7179625005364076e-07, |
|
"loss": 1.072, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.943095324392896, |
|
"grad_norm": 1.982397014057623, |
|
"learning_rate": 1.6748660997694454e-07, |
|
"loss": 0.862, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.9438202247191011, |
|
"grad_norm": 2.4635792769761005, |
|
"learning_rate": 1.6323125757964799e-07, |
|
"loss": 0.9996, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.9445451250453063, |
|
"grad_norm": 2.1226326530971704, |
|
"learning_rate": 1.5903021635658044e-07, |
|
"loss": 1.0194, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.9452700253715114, |
|
"grad_norm": 2.2271194959547116, |
|
"learning_rate": 1.5488350950270237e-07, |
|
"loss": 0.9331, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.9459949256977166, |
|
"grad_norm": 2.472581051188092, |
|
"learning_rate": 1.507911599129841e-07, |
|
"loss": 0.9543, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.9467198260239217, |
|
"grad_norm": 1.872694939056278, |
|
"learning_rate": 1.4675319018227608e-07, |
|
"loss": 1.0782, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.9474447263501269, |
|
"grad_norm": 2.0981456286626297, |
|
"learning_rate": 1.4276962260518223e-07, |
|
"loss": 0.9838, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.948169626676332, |
|
"grad_norm": 2.1938985740167483, |
|
"learning_rate": 1.3884047917594236e-07, |
|
"loss": 1.0023, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.9488945270025372, |
|
"grad_norm": 2.342040537819885, |
|
"learning_rate": 1.349657815883032e-07, |
|
"loss": 0.9773, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.9496194273287423, |
|
"grad_norm": 2.0373395983000795, |
|
"learning_rate": 1.3114555123540762e-07, |
|
"loss": 0.9412, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.9503443276549475, |
|
"grad_norm": 2.3706302014168714, |
|
"learning_rate": 1.2737980920966785e-07, |
|
"loss": 1.0296, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.9510692279811526, |
|
"grad_norm": 2.130493636831118, |
|
"learning_rate": 1.236685763026546e-07, |
|
"loss": 0.9575, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.9517941283073578, |
|
"grad_norm": 2.257925035976561, |
|
"learning_rate": 1.2001187300498153e-07, |
|
"loss": 0.9083, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.9525190286335629, |
|
"grad_norm": 2.1864022493275783, |
|
"learning_rate": 1.1640971950618752e-07, |
|
"loss": 0.923, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.9532439289597681, |
|
"grad_norm": 2.5860606750284734, |
|
"learning_rate": 1.1286213569463467e-07, |
|
"loss": 1.0475, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.9539688292859732, |
|
"grad_norm": 2.258441636325009, |
|
"learning_rate": 1.0936914115738717e-07, |
|
"loss": 0.8783, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.9546937296121784, |
|
"grad_norm": 1.8730597641684372, |
|
"learning_rate": 1.0593075518011143e-07, |
|
"loss": 0.9545, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.9554186299383834, |
|
"grad_norm": 2.3138211525804584, |
|
"learning_rate": 1.0254699674696611e-07, |
|
"loss": 1.016, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.9561435302645886, |
|
"grad_norm": 1.9117655582358168, |
|
"learning_rate": 9.921788454049896e-08, |
|
"loss": 1.0075, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.9568684305907937, |
|
"grad_norm": 2.0487380925988368, |
|
"learning_rate": 9.594343694154129e-08, |
|
"loss": 0.9545, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.9575933309169989, |
|
"grad_norm": 1.808692209003071, |
|
"learning_rate": 9.272367202910804e-08, |
|
"loss": 0.8925, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.958318231243204, |
|
"grad_norm": 2.7015235278624194, |
|
"learning_rate": 8.9558607580299e-08, |
|
"loss": 0.9318, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.9590431315694092, |
|
"grad_norm": 2.2921044642024757, |
|
"learning_rate": 8.644826107019888e-08, |
|
"loss": 1.0238, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.9597680318956143, |
|
"grad_norm": 1.9280021670942513, |
|
"learning_rate": 8.339264967178074e-08, |
|
"loss": 0.971, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.9604929322218195, |
|
"grad_norm": 1.8067867464540794, |
|
"learning_rate": 8.039179025581379e-08, |
|
"loss": 0.8251, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.9612178325480246, |
|
"grad_norm": 2.7036098510140016, |
|
"learning_rate": 7.744569939076574e-08, |
|
"loss": 1.0214, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.9619427328742298, |
|
"grad_norm": 1.7075937015114786, |
|
"learning_rate": 7.455439334271619e-08, |
|
"loss": 0.9234, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.9626676332004349, |
|
"grad_norm": 1.8777145955794914, |
|
"learning_rate": 7.171788807526336e-08, |
|
"loss": 0.8121, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.9633925335266401, |
|
"grad_norm": 2.0244110553223695, |
|
"learning_rate": 6.89361992494375e-08, |
|
"loss": 0.9208, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.9641174338528452, |
|
"grad_norm": 2.542844240335413, |
|
"learning_rate": 6.62093422236132e-08, |
|
"loss": 1.0046, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.9648423341790504, |
|
"grad_norm": 2.543683044105189, |
|
"learning_rate": 6.353733205342715e-08, |
|
"loss": 1.0122, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.9655672345052555, |
|
"grad_norm": 2.1697924274060245, |
|
"learning_rate": 6.092018349169171e-08, |
|
"loss": 1.0031, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.9662921348314607, |
|
"grad_norm": 2.0531262536255985, |
|
"learning_rate": 5.8357910988317e-08, |
|
"loss": 0.8397, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.9670170351576658, |
|
"grad_norm": 2.913557846619087, |
|
"learning_rate": 5.585052869022556e-08, |
|
"loss": 0.9297, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.967741935483871, |
|
"grad_norm": 2.204845207370084, |
|
"learning_rate": 5.339805044128121e-08, |
|
"loss": 1.003, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.9684668358100761, |
|
"grad_norm": 2.644048528912831, |
|
"learning_rate": 5.1000489782210284e-08, |
|
"loss": 1.0444, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.9691917361362813, |
|
"grad_norm": 2.402838017572579, |
|
"learning_rate": 4.8657859950520524e-08, |
|
"loss": 0.9235, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.9699166364624864, |
|
"grad_norm": 1.9654858262351746, |
|
"learning_rate": 4.6370173880438964e-08, |
|
"loss": 0.958, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.9706415367886916, |
|
"grad_norm": 2.7707673392332746, |
|
"learning_rate": 4.413744420283195e-08, |
|
"loss": 0.8956, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.9713664371148967, |
|
"grad_norm": 1.9709988493814223, |
|
"learning_rate": 4.195968324513744e-08, |
|
"loss": 0.9951, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.9720913374411019, |
|
"grad_norm": 2.1055256777177958, |
|
"learning_rate": 3.983690303130172e-08, |
|
"loss": 0.9204, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.972816237767307, |
|
"grad_norm": 2.218819032192361, |
|
"learning_rate": 3.776911528170391e-08, |
|
"loss": 0.8971, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.9735411380935122, |
|
"grad_norm": 2.0858300430985146, |
|
"learning_rate": 3.575633141310153e-08, |
|
"loss": 0.897, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.9742660384197173, |
|
"grad_norm": 2.3972281459872713, |
|
"learning_rate": 3.379856253855951e-08, |
|
"loss": 0.9454, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.9749909387459225, |
|
"grad_norm": 2.488073794270114, |
|
"learning_rate": 3.1895819467391286e-08, |
|
"loss": 0.8999, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.9757158390721276, |
|
"grad_norm": 1.988995529917583, |
|
"learning_rate": 3.0048112705102215e-08, |
|
"loss": 0.971, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.9764407393983328, |
|
"grad_norm": 2.077919130586888, |
|
"learning_rate": 2.8255452453326282e-08, |
|
"loss": 1.003, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.9771656397245378, |
|
"grad_norm": 2.028713109556211, |
|
"learning_rate": 2.65178486097728e-08, |
|
"loss": 1.049, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.977890540050743, |
|
"grad_norm": 1.6908367188168048, |
|
"learning_rate": 2.4835310768172026e-08, |
|
"loss": 0.8732, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.9786154403769481, |
|
"grad_norm": 2.4804402514448127, |
|
"learning_rate": 2.3207848218222974e-08, |
|
"loss": 0.9352, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9793403407031533, |
|
"grad_norm": 2.1542622951441857, |
|
"learning_rate": 2.163546994553789e-08, |
|
"loss": 0.9104, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.9800652410293584, |
|
"grad_norm": 2.527194109345364, |
|
"learning_rate": 2.011818463159676e-08, |
|
"loss": 1.0832, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.9807901413555636, |
|
"grad_norm": 2.317833340426504, |
|
"learning_rate": 1.8656000653698436e-08, |
|
"loss": 1.0094, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.9815150416817687, |
|
"grad_norm": 2.343407752981275, |
|
"learning_rate": 1.724892608491291e-08, |
|
"loss": 1.0245, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.9822399420079739, |
|
"grad_norm": 1.8989849927794842, |
|
"learning_rate": 1.5896968694040228e-08, |
|
"loss": 0.9417, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.982964842334179, |
|
"grad_norm": 2.770976661865063, |
|
"learning_rate": 1.4600135945560534e-08, |
|
"loss": 0.99, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.9836897426603842, |
|
"grad_norm": 1.7947478635260772, |
|
"learning_rate": 1.3358434999602987e-08, |
|
"loss": 0.892, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.9844146429865893, |
|
"grad_norm": 2.310601190306124, |
|
"learning_rate": 1.2171872711895794e-08, |
|
"loss": 1.0248, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.9851395433127945, |
|
"grad_norm": 2.287100614820235, |
|
"learning_rate": 1.1040455633738457e-08, |
|
"loss": 0.9974, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.9858644436389996, |
|
"grad_norm": 2.107445997393385, |
|
"learning_rate": 9.964190011955144e-09, |
|
"loss": 0.9899, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.9865893439652048, |
|
"grad_norm": 2.447737679629097, |
|
"learning_rate": 8.943081788869157e-09, |
|
"loss": 0.9687, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.9873142442914099, |
|
"grad_norm": 2.151832291830881, |
|
"learning_rate": 7.97713660226629e-09, |
|
"loss": 1.0199, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.9880391446176151, |
|
"grad_norm": 2.0385527802522976, |
|
"learning_rate": 7.066359785362631e-09, |
|
"loss": 1.021, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.9887640449438202, |
|
"grad_norm": 2.3197757332833415, |
|
"learning_rate": 6.210756366775705e-09, |
|
"loss": 0.9591, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.9894889452700254, |
|
"grad_norm": 2.3203612246956924, |
|
"learning_rate": 5.410331070498931e-09, |
|
"loss": 0.9674, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.9902138455962305, |
|
"grad_norm": 1.8262826260586555, |
|
"learning_rate": 4.665088315874977e-09, |
|
"loss": 1.0296, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.9909387459224357, |
|
"grad_norm": 1.961319160005979, |
|
"learning_rate": 3.975032217566899e-09, |
|
"loss": 0.9492, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.9916636462486408, |
|
"grad_norm": 1.8126619362913512, |
|
"learning_rate": 3.3401665855414824e-09, |
|
"loss": 0.8814, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.992388546574846, |
|
"grad_norm": 2.0882014952836916, |
|
"learning_rate": 2.760494925045931e-09, |
|
"loss": 0.8576, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.9931134469010511, |
|
"grad_norm": 2.2450600007272037, |
|
"learning_rate": 2.236020436586772e-09, |
|
"loss": 0.9932, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.9938383472272563, |
|
"grad_norm": 2.44028976343664, |
|
"learning_rate": 1.7667460159143113e-09, |
|
"loss": 0.8992, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.9945632475534614, |
|
"grad_norm": 2.0146202920467293, |
|
"learning_rate": 1.3526742540070913e-09, |
|
"loss": 1.006, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.9952881478796666, |
|
"grad_norm": 1.9601240017014594, |
|
"learning_rate": 9.938074370530182e-10, |
|
"loss": 0.9297, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.9960130482058717, |
|
"grad_norm": 1.8087006446044052, |
|
"learning_rate": 6.90147546443809e-10, |
|
"loss": 0.8517, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.9967379485320769, |
|
"grad_norm": 2.3390439919559936, |
|
"learning_rate": 4.416962587583395e-10, |
|
"loss": 0.9555, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.997462848858282, |
|
"grad_norm": 1.9176591785146089, |
|
"learning_rate": 2.484549457559826e-10, |
|
"loss": 0.9623, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.9981877491844872, |
|
"grad_norm": 2.215362657603315, |
|
"learning_rate": 1.1042467436994664e-10, |
|
"loss": 0.8946, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.9989126495106923, |
|
"grad_norm": 2.000196078224186, |
|
"learning_rate": 2.7606206697283755e-11, |
|
"loss": 0.9503, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.9996375498368975, |
|
"grad_norm": 2.2467269541867663, |
|
"learning_rate": 0.0, |
|
"loss": 0.9984, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.9996375498368975, |
|
"step": 1379, |
|
"total_flos": 1.3996327755055104e+16, |
|
"train_loss": 0.49300233914940317, |
|
"train_runtime": 19998.664, |
|
"train_samples_per_second": 8.828, |
|
"train_steps_per_second": 0.069 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1379, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 345, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.3996327755055104e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|