{
  "best_metric": 0.14084061980247498,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.6302521008403361,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004201680672268907,
      "grad_norm": 0.49750980734825134,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.1452,
      "step": 1
    },
    {
      "epoch": 0.004201680672268907,
      "eval_loss": 0.36222052574157715,
      "eval_runtime": 234.0564,
      "eval_samples_per_second": 1.713,
      "eval_steps_per_second": 0.432,
      "step": 1
    },
    {
      "epoch": 0.008403361344537815,
      "grad_norm": 0.5146015286445618,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 0.1574,
      "step": 2
    },
    {
      "epoch": 0.012605042016806723,
      "grad_norm": 0.5062153935432434,
      "learning_rate": 3.3e-05,
      "loss": 0.1532,
      "step": 3
    },
    {
      "epoch": 0.01680672268907563,
      "grad_norm": 0.41625574231147766,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 0.1358,
      "step": 4
    },
    {
      "epoch": 0.02100840336134454,
      "grad_norm": 0.17534741759300232,
      "learning_rate": 5.5e-05,
      "loss": 0.1105,
      "step": 5
    },
    {
      "epoch": 0.025210084033613446,
      "grad_norm": 0.1469479352235794,
      "learning_rate": 6.6e-05,
      "loss": 0.1,
      "step": 6
    },
    {
      "epoch": 0.029411764705882353,
      "grad_norm": 0.12348287552595139,
      "learning_rate": 7.7e-05,
      "loss": 0.0873,
      "step": 7
    },
    {
      "epoch": 0.03361344537815126,
      "grad_norm": 0.11953099817037582,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.0847,
      "step": 8
    },
    {
      "epoch": 0.037815126050420166,
      "grad_norm": 0.11016067862510681,
      "learning_rate": 9.900000000000001e-05,
      "loss": 0.0695,
      "step": 9
    },
    {
      "epoch": 0.04201680672268908,
      "grad_norm": 0.14919094741344452,
      "learning_rate": 0.00011,
      "loss": 0.0631,
      "step": 10
    },
    {
      "epoch": 0.046218487394957986,
      "grad_norm": 0.0973418578505516,
      "learning_rate": 0.0001099924817745858,
      "loss": 0.05,
      "step": 11
    },
    {
      "epoch": 0.05042016806722689,
      "grad_norm": 0.07051382213830948,
      "learning_rate": 0.00010996992915375093,
      "loss": 0.0442,
      "step": 12
    },
    {
      "epoch": 0.0546218487394958,
      "grad_norm": 0.09474644064903259,
      "learning_rate": 0.00010993234830315676,
      "loss": 0.0438,
      "step": 13
    },
    {
      "epoch": 0.058823529411764705,
      "grad_norm": 0.09510326385498047,
      "learning_rate": 0.0001098797494970326,
      "loss": 0.0452,
      "step": 14
    },
    {
      "epoch": 0.06302521008403361,
      "grad_norm": 0.07116366177797318,
      "learning_rate": 0.00010981214711536684,
      "loss": 0.0477,
      "step": 15
    },
    {
      "epoch": 0.06722689075630252,
      "grad_norm": 0.12737803161144257,
      "learning_rate": 0.00010972955963997563,
      "loss": 0.0442,
      "step": 16
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 0.06226251646876335,
      "learning_rate": 0.00010963200964945011,
      "loss": 0.0345,
      "step": 17
    },
    {
      "epoch": 0.07563025210084033,
      "grad_norm": 0.06668021529912949,
      "learning_rate": 0.00010951952381298364,
      "loss": 0.038,
      "step": 18
    },
    {
      "epoch": 0.07983193277310924,
      "grad_norm": 0.07071222364902496,
      "learning_rate": 0.00010939213288308077,
      "loss": 0.0414,
      "step": 19
    },
    {
      "epoch": 0.08403361344537816,
      "grad_norm": 0.06094304844737053,
      "learning_rate": 0.00010924987168714973,
      "loss": 0.0453,
      "step": 20
    },
    {
      "epoch": 0.08823529411764706,
      "grad_norm": 0.10369093716144562,
      "learning_rate": 0.00010909277911798103,
      "loss": 0.0403,
      "step": 21
    },
    {
      "epoch": 0.09243697478991597,
      "grad_norm": 0.06250135600566864,
      "learning_rate": 0.00010892089812311451,
      "loss": 0.0407,
      "step": 22
    },
    {
      "epoch": 0.09663865546218488,
      "grad_norm": 0.059974078088998795,
      "learning_rate": 0.00010873427569309797,
      "loss": 0.0409,
      "step": 23
    },
    {
      "epoch": 0.10084033613445378,
      "grad_norm": 0.07670460641384125,
      "learning_rate": 0.00010853296284864032,
      "loss": 0.05,
      "step": 24
    },
    {
      "epoch": 0.10504201680672269,
      "grad_norm": 0.053304269909858704,
      "learning_rate": 0.00010831701462666318,
      "loss": 0.0412,
      "step": 25
    },
    {
      "epoch": 0.1092436974789916,
      "grad_norm": 0.11782944202423096,
      "learning_rate": 0.00010808649006525419,
      "loss": 0.053,
      "step": 26
    },
    {
      "epoch": 0.1134453781512605,
      "grad_norm": 0.06433657556772232,
      "learning_rate": 0.00010784145218752665,
      "loss": 0.0587,
      "step": 27
    },
    {
      "epoch": 0.11764705882352941,
      "grad_norm": 0.08066810667514801,
      "learning_rate": 0.00010758196798438968,
      "loss": 0.0521,
      "step": 28
    },
    {
      "epoch": 0.12184873949579832,
      "grad_norm": 0.05485512688755989,
      "learning_rate": 0.00010730810839623346,
      "loss": 0.0528,
      "step": 29
    },
    {
      "epoch": 0.12605042016806722,
      "grad_norm": 0.17254118621349335,
      "learning_rate": 0.0001070199482935349,
      "loss": 0.0577,
      "step": 30
    },
    {
      "epoch": 0.13025210084033614,
      "grad_norm": 0.11069461703300476,
      "learning_rate": 0.00010671756645638888,
      "loss": 0.0669,
      "step": 31
    },
    {
      "epoch": 0.13445378151260504,
      "grad_norm": 0.12035743147134781,
      "learning_rate": 0.00010640104555297034,
      "loss": 0.0708,
      "step": 32
    },
    {
      "epoch": 0.13865546218487396,
      "grad_norm": 0.061041515320539474,
      "learning_rate": 0.00010607047211693389,
      "loss": 0.0832,
      "step": 33
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 0.31896713376045227,
      "learning_rate": 0.00010572593652375616,
      "loss": 0.0914,
      "step": 34
    },
    {
      "epoch": 0.14705882352941177,
      "grad_norm": 0.12278568744659424,
      "learning_rate": 0.00010536753296602816,
      "loss": 0.0993,
      "step": 35
    },
    {
      "epoch": 0.15126050420168066,
      "grad_norm": 0.12456497550010681,
      "learning_rate": 0.00010499535942770394,
      "loss": 0.1148,
      "step": 36
    },
    {
      "epoch": 0.15546218487394958,
      "grad_norm": 0.10634661465883255,
      "learning_rate": 0.00010460951765731275,
      "loss": 0.1196,
      "step": 37
    },
    {
      "epoch": 0.15966386554621848,
      "grad_norm": 0.09298716485500336,
      "learning_rate": 0.000104210113140142,
      "loss": 0.1326,
      "step": 38
    },
    {
      "epoch": 0.1638655462184874,
      "grad_norm": 0.06946210563182831,
      "learning_rate": 0.00010379725506939865,
      "loss": 0.1163,
      "step": 39
    },
    {
      "epoch": 0.16806722689075632,
      "grad_norm": 0.1408974677324295,
      "learning_rate": 0.0001033710563163569,
      "loss": 0.1521,
      "step": 40
    },
    {
      "epoch": 0.1722689075630252,
      "grad_norm": 0.11890745908021927,
      "learning_rate": 0.00010293163339950024,
      "loss": 0.1617,
      "step": 41
    },
    {
      "epoch": 0.17647058823529413,
      "grad_norm": 0.0880684107542038,
      "learning_rate": 0.00010247910645266658,
      "loss": 0.1707,
      "step": 42
    },
    {
      "epoch": 0.18067226890756302,
      "grad_norm": 0.10107150673866272,
      "learning_rate": 0.00010201359919220464,
      "loss": 0.2014,
      "step": 43
    },
    {
      "epoch": 0.18487394957983194,
      "grad_norm": 0.11371296644210815,
      "learning_rate": 0.00010153523888315144,
      "loss": 0.2271,
      "step": 44
    },
    {
      "epoch": 0.18907563025210083,
      "grad_norm": 0.13401076197624207,
      "learning_rate": 0.00010104415630443907,
      "loss": 0.2555,
      "step": 45
    },
    {
      "epoch": 0.19327731092436976,
      "grad_norm": 0.16017286479473114,
      "learning_rate": 0.0001005404857131411,
      "loss": 0.3304,
      "step": 46
    },
    {
      "epoch": 0.19747899159663865,
      "grad_norm": 0.16582688689231873,
      "learning_rate": 0.00010002436480776809,
      "loss": 0.4034,
      "step": 47
    },
    {
      "epoch": 0.20168067226890757,
      "grad_norm": 0.2239397168159485,
      "learning_rate": 9.949593469062211e-05,
      "loss": 0.5349,
      "step": 48
    },
    {
      "epoch": 0.20588235294117646,
      "grad_norm": 0.3563201129436493,
      "learning_rate": 9.895533982922087e-05,
      "loss": 0.9682,
      "step": 49
    },
    {
      "epoch": 0.21008403361344538,
      "grad_norm": 0.4066728353500366,
      "learning_rate": 9.840272801680165e-05,
      "loss": 1.2013,
      "step": 50
    },
    {
      "epoch": 0.21008403361344538,
      "eval_loss": 0.521058976650238,
      "eval_runtime": 245.7858,
      "eval_samples_per_second": 1.632,
      "eval_steps_per_second": 0.411,
      "step": 50
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 2.0479297637939453,
      "learning_rate": 9.783825033191619e-05,
      "loss": 0.549,
      "step": 51
    },
    {
      "epoch": 0.2184873949579832,
      "grad_norm": 1.3544615507125854,
      "learning_rate": 9.726206109712725e-05,
      "loss": 0.2489,
      "step": 52
    },
    {
      "epoch": 0.22268907563025211,
      "grad_norm": 0.32278114557266235,
      "learning_rate": 9.667431783681842e-05,
      "loss": 0.0716,
      "step": 53
    },
    {
      "epoch": 0.226890756302521,
      "grad_norm": 0.08393025398254395,
      "learning_rate": 9.607518123412847e-05,
      "loss": 0.0438,
      "step": 54
    },
    {
      "epoch": 0.23109243697478993,
      "grad_norm": 0.07648172974586487,
      "learning_rate": 9.546481508702224e-05,
      "loss": 0.0427,
      "step": 55
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.07890063524246216,
      "learning_rate": 9.48433862635099e-05,
      "loss": 0.0433,
      "step": 56
    },
    {
      "epoch": 0.23949579831932774,
      "grad_norm": 0.0927102193236351,
      "learning_rate": 9.421106465602684e-05,
      "loss": 0.0453,
      "step": 57
    },
    {
      "epoch": 0.24369747899159663,
      "grad_norm": 0.08202504366636276,
      "learning_rate": 9.356802313498687e-05,
      "loss": 0.0378,
      "step": 58
    },
    {
      "epoch": 0.24789915966386555,
      "grad_norm": 0.07329721003770828,
      "learning_rate": 9.291443750152112e-05,
      "loss": 0.0344,
      "step": 59
    },
    {
      "epoch": 0.25210084033613445,
      "grad_norm": 0.059026774019002914,
      "learning_rate": 9.225048643941577e-05,
      "loss": 0.0429,
      "step": 60
    },
    {
      "epoch": 0.25630252100840334,
      "grad_norm": 0.05524129793047905,
      "learning_rate": 9.157635146626164e-05,
      "loss": 0.0362,
      "step": 61
    },
    {
      "epoch": 0.2605042016806723,
      "grad_norm": 0.08096156269311905,
      "learning_rate": 9.089221688382928e-05,
      "loss": 0.0396,
      "step": 62
    },
    {
      "epoch": 0.2647058823529412,
      "grad_norm": 0.08996499329805374,
      "learning_rate": 9.019826972768242e-05,
      "loss": 0.0384,
      "step": 63
    },
    {
      "epoch": 0.2689075630252101,
      "grad_norm": 0.04689677432179451,
      "learning_rate": 8.949469971604454e-05,
      "loss": 0.0348,
      "step": 64
    },
    {
      "epoch": 0.27310924369747897,
      "grad_norm": 0.06765469908714294,
      "learning_rate": 8.878169919793173e-05,
      "loss": 0.0352,
      "step": 65
    },
    {
      "epoch": 0.2773109243697479,
      "grad_norm": 0.07770418375730515,
      "learning_rate": 8.805946310056638e-05,
      "loss": 0.0358,
      "step": 66
    },
    {
      "epoch": 0.2815126050420168,
      "grad_norm": 0.05707339197397232,
      "learning_rate": 8.732818887608602e-05,
      "loss": 0.0351,
      "step": 67
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.07923433184623718,
      "learning_rate": 8.65880764475619e-05,
      "loss": 0.0396,
      "step": 68
    },
    {
      "epoch": 0.28991596638655465,
      "grad_norm": 0.07886988669633865,
      "learning_rate": 8.583932815434201e-05,
      "loss": 0.0361,
      "step": 69
    },
    {
      "epoch": 0.29411764705882354,
      "grad_norm": 0.07595830410718918,
      "learning_rate": 8.50821486967335e-05,
      "loss": 0.0389,
      "step": 70
    },
    {
      "epoch": 0.29831932773109243,
      "grad_norm": 0.08922255039215088,
      "learning_rate": 8.431674508003966e-05,
      "loss": 0.0403,
      "step": 71
    },
    {
      "epoch": 0.3025210084033613,
      "grad_norm": 0.05408494919538498,
      "learning_rate": 8.354332655796683e-05,
      "loss": 0.0442,
      "step": 72
    },
    {
      "epoch": 0.3067226890756303,
      "grad_norm": 0.18477368354797363,
      "learning_rate": 8.276210457541642e-05,
      "loss": 0.0405,
      "step": 73
    },
    {
      "epoch": 0.31092436974789917,
      "grad_norm": 0.19412672519683838,
      "learning_rate": 8.197329271067796e-05,
      "loss": 0.0453,
      "step": 74
    },
    {
      "epoch": 0.31512605042016806,
      "grad_norm": 0.17333091795444489,
      "learning_rate": 8.117710661703905e-05,
      "loss": 0.0523,
      "step": 75
    },
    {
      "epoch": 0.31932773109243695,
      "grad_norm": 0.34910866618156433,
      "learning_rate": 8.037376396382784e-05,
      "loss": 0.0482,
      "step": 76
    },
    {
      "epoch": 0.3235294117647059,
      "grad_norm": 0.12087592482566833,
      "learning_rate": 7.956348437690437e-05,
      "loss": 0.0492,
      "step": 77
    },
    {
      "epoch": 0.3277310924369748,
      "grad_norm": 0.13387855887413025,
      "learning_rate": 7.87464893786171e-05,
      "loss": 0.0586,
      "step": 78
    },
    {
      "epoch": 0.3319327731092437,
      "grad_norm": 0.10430057346820831,
      "learning_rate": 7.792300232724097e-05,
      "loss": 0.0577,
      "step": 79
    },
    {
      "epoch": 0.33613445378151263,
      "grad_norm": 0.058383017778396606,
      "learning_rate": 7.709324835591332e-05,
      "loss": 0.0578,
      "step": 80
    },
    {
      "epoch": 0.3403361344537815,
      "grad_norm": 0.2369661182165146,
      "learning_rate": 7.625745431108487e-05,
      "loss": 0.0654,
      "step": 81
    },
    {
      "epoch": 0.3445378151260504,
      "grad_norm": 0.0711449384689331,
      "learning_rate": 7.541584869050213e-05,
      "loss": 0.0725,
      "step": 82
    },
    {
      "epoch": 0.3487394957983193,
      "grad_norm": 0.11881482601165771,
      "learning_rate": 7.456866158073842e-05,
      "loss": 0.0738,
      "step": 83
    },
    {
      "epoch": 0.35294117647058826,
      "grad_norm": 0.1186806783080101,
      "learning_rate": 7.371612459429037e-05,
      "loss": 0.093,
      "step": 84
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.07813920825719833,
      "learning_rate": 7.28584708062576e-05,
      "loss": 0.0819,
      "step": 85
    },
    {
      "epoch": 0.36134453781512604,
      "grad_norm": 0.14843684434890747,
      "learning_rate": 7.19959346906221e-05,
      "loss": 0.1048,
      "step": 86
    },
    {
      "epoch": 0.36554621848739494,
      "grad_norm": 0.1119343712925911,
      "learning_rate": 7.112875205614558e-05,
      "loss": 0.1073,
      "step": 87
    },
    {
      "epoch": 0.3697478991596639,
      "grad_norm": 0.14418436586856842,
      "learning_rate": 7.025715998190145e-05,
      "loss": 0.1069,
      "step": 88
    },
    {
      "epoch": 0.3739495798319328,
      "grad_norm": 0.1696012318134308,
      "learning_rate": 6.938139675246009e-05,
      "loss": 0.1328,
      "step": 89
    },
    {
      "epoch": 0.37815126050420167,
      "grad_norm": 0.07131479680538177,
      "learning_rate": 6.850170179274395e-05,
      "loss": 0.133,
      "step": 90
    },
    {
      "epoch": 0.38235294117647056,
      "grad_norm": 0.214226633310318,
      "learning_rate": 6.761831560257134e-05,
      "loss": 0.1499,
      "step": 91
    },
    {
      "epoch": 0.3865546218487395,
      "grad_norm": 0.2105318009853363,
      "learning_rate": 6.673147969090608e-05,
      "loss": 0.1836,
      "step": 92
    },
    {
      "epoch": 0.3907563025210084,
      "grad_norm": 0.18370755016803741,
      "learning_rate": 6.584143650983141e-05,
      "loss": 0.2001,
      "step": 93
    },
    {
      "epoch": 0.3949579831932773,
      "grad_norm": 0.43610021471977234,
      "learning_rate": 6.494842938826605e-05,
      "loss": 0.217,
      "step": 94
    },
    {
      "epoch": 0.39915966386554624,
      "grad_norm": 0.37327277660369873,
      "learning_rate": 6.405270246544037e-05,
      "loss": 0.2492,
      "step": 95
    },
    {
      "epoch": 0.40336134453781514,
      "grad_norm": 0.30380088090896606,
      "learning_rate": 6.31545006241513e-05,
      "loss": 0.3072,
      "step": 96
    },
    {
      "epoch": 0.40756302521008403,
      "grad_norm": 0.17763619124889374,
      "learning_rate": 6.22540694238138e-05,
      "loss": 0.3711,
      "step": 97
    },
    {
      "epoch": 0.4117647058823529,
      "grad_norm": 0.2223198413848877,
      "learning_rate": 6.135165503332725e-05,
      "loss": 0.55,
      "step": 98
    },
    {
      "epoch": 0.41596638655462187,
      "grad_norm": 0.3319002687931061,
      "learning_rate": 6.0447504163775465e-05,
      "loss": 0.9316,
      "step": 99
    },
    {
      "epoch": 0.42016806722689076,
      "grad_norm": 0.3552457094192505,
      "learning_rate": 5.954186400097829e-05,
      "loss": 1.0921,
      "step": 100
    },
    {
      "epoch": 0.42016806722689076,
      "eval_loss": 0.47890180349349976,
      "eval_runtime": 270.8902,
      "eval_samples_per_second": 1.48,
      "eval_steps_per_second": 0.373,
      "step": 100
    },
    {
      "epoch": 0.42436974789915966,
      "grad_norm": 4.169086933135986,
      "learning_rate": 5.8634982137913465e-05,
      "loss": 0.5264,
      "step": 101
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 1.9985085725784302,
      "learning_rate": 5.772710650702723e-05,
      "loss": 0.1248,
      "step": 102
    },
    {
      "epoch": 0.4327731092436975,
      "grad_norm": 0.057010602205991745,
      "learning_rate": 5.681848531245195e-05,
      "loss": 0.0331,
      "step": 103
    },
    {
      "epoch": 0.4369747899159664,
      "grad_norm": 0.07370073348283768,
      "learning_rate": 5.590936696214972e-05,
      "loss": 0.0401,
      "step": 104
    },
    {
      "epoch": 0.4411764705882353,
      "grad_norm": 0.07470442354679108,
      "learning_rate": 5.5e-05,
      "loss": 0.0451,
      "step": 105
    },
    {
      "epoch": 0.44537815126050423,
      "grad_norm": 0.07614938914775848,
      "learning_rate": 5.409063303785029e-05,
      "loss": 0.05,
      "step": 106
    },
    {
      "epoch": 0.4495798319327731,
      "grad_norm": 0.07786896824836731,
      "learning_rate": 5.318151468754805e-05,
      "loss": 0.0505,
      "step": 107
    },
    {
      "epoch": 0.453781512605042,
      "grad_norm": 0.07709647715091705,
      "learning_rate": 5.227289349297277e-05,
      "loss": 0.0517,
      "step": 108
    },
    {
      "epoch": 0.4579831932773109,
      "grad_norm": 0.07772062718868256,
      "learning_rate": 5.136501786208654e-05,
      "loss": 0.0539,
      "step": 109
    },
    {
      "epoch": 0.46218487394957986,
      "grad_norm": 0.07466543465852737,
      "learning_rate": 5.045813599902173e-05,
      "loss": 0.0563,
      "step": 110
    },
    {
      "epoch": 0.46638655462184875,
      "grad_norm": 0.07618583738803864,
      "learning_rate": 4.955249583622455e-05,
      "loss": 0.0543,
      "step": 111
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.0762595385313034,
      "learning_rate": 4.8648344966672767e-05,
      "loss": 0.0534,
      "step": 112
    },
    {
      "epoch": 0.47478991596638653,
      "grad_norm": 0.07716407626867294,
      "learning_rate": 4.774593057618621e-05,
      "loss": 0.0501,
      "step": 113
    },
    {
      "epoch": 0.4789915966386555,
      "grad_norm": 0.07322922348976135,
      "learning_rate": 4.6845499375848686e-05,
      "loss": 0.0509,
      "step": 114
    },
    {
      "epoch": 0.4831932773109244,
      "grad_norm": 0.0707123726606369,
      "learning_rate": 4.5947297534559625e-05,
      "loss": 0.0482,
      "step": 115
    },
    {
      "epoch": 0.48739495798319327,
      "grad_norm": 0.07027147710323334,
      "learning_rate": 4.5051570611733976e-05,
      "loss": 0.0489,
      "step": 116
    },
    {
      "epoch": 0.49159663865546216,
      "grad_norm": 0.06395595520734787,
      "learning_rate": 4.415856349016859e-05,
      "loss": 0.045,
      "step": 117
    },
    {
      "epoch": 0.4957983193277311,
      "grad_norm": 0.06482091546058655,
      "learning_rate": 4.326852030909393e-05,
      "loss": 0.0441,
      "step": 118
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.06443566083908081,
      "learning_rate": 4.238168439742867e-05,
      "loss": 0.0436,
      "step": 119
    },
    {
      "epoch": 0.5042016806722689,
      "grad_norm": 0.06711577624082565,
      "learning_rate": 4.149829820725605e-05,
      "loss": 0.0436,
      "step": 120
    },
    {
      "epoch": 0.5084033613445378,
      "grad_norm": 0.05877511948347092,
      "learning_rate": 4.0618603247539916e-05,
      "loss": 0.0432,
      "step": 121
    },
    {
      "epoch": 0.5126050420168067,
      "grad_norm": 0.06349120289087296,
      "learning_rate": 3.9742840018098564e-05,
      "loss": 0.0508,
      "step": 122
    },
    {
      "epoch": 0.5168067226890757,
      "grad_norm": 0.05786605551838875,
      "learning_rate": 3.887124794385445e-05,
      "loss": 0.0459,
      "step": 123
    },
    {
      "epoch": 0.5210084033613446,
      "grad_norm": 0.058453019708395004,
      "learning_rate": 3.80040653093779e-05,
      "loss": 0.0446,
      "step": 124
    },
    {
      "epoch": 0.5252100840336135,
      "grad_norm": 0.05471871420741081,
      "learning_rate": 3.714152919374241e-05,
      "loss": 0.0485,
      "step": 125
    },
    {
      "epoch": 0.5294117647058824,
      "grad_norm": 0.06058911234140396,
      "learning_rate": 3.628387540570963e-05,
      "loss": 0.0462,
      "step": 126
    },
    {
      "epoch": 0.5336134453781513,
      "grad_norm": 0.05696989595890045,
      "learning_rate": 3.543133841926159e-05,
      "loss": 0.0513,
      "step": 127
    },
    {
      "epoch": 0.5378151260504201,
      "grad_norm": 0.04979941248893738,
      "learning_rate": 3.458415130949785e-05,
      "loss": 0.0616,
      "step": 128
    },
    {
      "epoch": 0.542016806722689,
      "grad_norm": 0.046740416437387466,
      "learning_rate": 3.374254568891514e-05,
      "loss": 0.0597,
      "step": 129
    },
    {
      "epoch": 0.5462184873949579,
      "grad_norm": 0.048966214060783386,
      "learning_rate": 3.290675164408669e-05,
      "loss": 0.0661,
      "step": 130
    },
    {
      "epoch": 0.5504201680672269,
      "grad_norm": 0.05556921288371086,
      "learning_rate": 3.207699767275904e-05,
      "loss": 0.0771,
      "step": 131
    },
    {
      "epoch": 0.5546218487394958,
      "grad_norm": 0.058795638382434845,
      "learning_rate": 3.12535106213829e-05,
      "loss": 0.0708,
      "step": 132
    },
    {
      "epoch": 0.5588235294117647,
      "grad_norm": 0.059710677713155746,
      "learning_rate": 3.0436515623095647e-05,
      "loss": 0.0702,
      "step": 133
    },
    {
      "epoch": 0.5630252100840336,
      "grad_norm": 0.06440545618534088,
      "learning_rate": 2.962623603617218e-05,
      "loss": 0.0857,
      "step": 134
    },
    {
      "epoch": 0.5672268907563025,
      "grad_norm": 0.059835322201251984,
      "learning_rate": 2.8822893382960955e-05,
      "loss": 0.0959,
      "step": 135
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.06194557622075081,
      "learning_rate": 2.802670728932207e-05,
      "loss": 0.1121,
      "step": 136
    },
    {
      "epoch": 0.5756302521008403,
      "grad_norm": 0.06737734377384186,
      "learning_rate": 2.723789542458361e-05,
      "loss": 0.1135,
      "step": 137
    },
    {
      "epoch": 0.5798319327731093,
      "grad_norm": 0.06589267402887344,
      "learning_rate": 2.6456673442033183e-05,
      "loss": 0.1125,
      "step": 138
    },
    {
      "epoch": 0.5840336134453782,
      "grad_norm": 0.07247166335582733,
      "learning_rate": 2.5683254919960356e-05,
      "loss": 0.1377,
      "step": 139
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 0.07507860660552979,
      "learning_rate": 2.4917851303266533e-05,
      "loss": 0.1326,
      "step": 140
    },
    {
      "epoch": 0.592436974789916,
      "grad_norm": 0.09094900637865067,
      "learning_rate": 2.4160671845658007e-05,
      "loss": 0.1411,
      "step": 141
    },
    {
      "epoch": 0.5966386554621849,
      "grad_norm": 0.09972415864467621,
      "learning_rate": 2.3411923552438105e-05,
      "loss": 0.1649,
      "step": 142
    },
    {
      "epoch": 0.6008403361344538,
      "grad_norm": 0.10327650606632233,
      "learning_rate": 2.2671811123913983e-05,
      "loss": 0.185,
      "step": 143
    },
    {
      "epoch": 0.6050420168067226,
      "grad_norm": 0.13001111149787903,
      "learning_rate": 2.194053689943362e-05,
      "loss": 0.2116,
      "step": 144
    },
    {
      "epoch": 0.6092436974789915,
      "grad_norm": 0.10833977162837982,
      "learning_rate": 2.121830080206827e-05,
      "loss": 0.2416,
      "step": 145
    },
    {
      "epoch": 0.6134453781512605,
      "grad_norm": 0.1772170066833496,
      "learning_rate": 2.0505300283955464e-05,
      "loss": 0.3062,
      "step": 146
    },
    {
      "epoch": 0.6176470588235294,
      "grad_norm": 0.2684325873851776,
      "learning_rate": 1.9801730272317585e-05,
      "loss": 0.4366,
      "step": 147
    },
    {
      "epoch": 0.6218487394957983,
      "grad_norm": 0.288666307926178,
      "learning_rate": 1.910778311617072e-05,
      "loss": 0.6787,
      "step": 148
    },
    {
      "epoch": 0.6260504201680672,
      "grad_norm": 0.31838375329971313,
      "learning_rate": 1.8423648533738342e-05,
      "loss": 0.9365,
      "step": 149
    },
    {
      "epoch": 0.6302521008403361,
      "grad_norm": 0.3195730745792389,
      "learning_rate": 1.7749513560584252e-05,
      "loss": 1.051,
      "step": 150
    },
    {
      "epoch": 0.6302521008403361,
      "eval_loss": 0.14084061980247498,
      "eval_runtime": 247.0151,
      "eval_samples_per_second": 1.623,
      "eval_steps_per_second": 0.409,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.86404819402752e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}