{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1110,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0,
      "eval_f1": 0.6562770562770562,
      "eval_loss": 0.7476533651351929,
      "eval_precision": 0.48976954555244456,
      "eval_recall": 0.9943156974202011,
      "eval_runtime": 16.4688,
      "eval_samples_per_second": 283.749,
      "eval_steps_per_second": 35.522,
      "step": 0
    },
    {
      "epoch": 0.018026137899954935,
      "grad_norm": 14.20566463470459,
      "learning_rate": 4.981949458483755e-05,
      "loss": 0.6876,
      "step": 10
    },
    {
      "epoch": 0.03605227579990987,
      "grad_norm": 27.79998016357422,
      "learning_rate": 4.963898916967509e-05,
      "loss": 0.5988,
      "step": 20
    },
    {
      "epoch": 0.054078413699864804,
      "grad_norm": 39.63682556152344,
      "learning_rate": 4.945848375451264e-05,
      "loss": 0.5677,
      "step": 30
    },
    {
      "epoch": 0.07210455159981974,
      "grad_norm": 38.49867630004883,
      "learning_rate": 4.927797833935018e-05,
      "loss": 0.5922,
      "step": 40
    },
    {
      "epoch": 0.09013068949977468,
      "grad_norm": 52.28614044189453,
      "learning_rate": 4.909747292418773e-05,
      "loss": 0.5506,
      "step": 50
    },
    {
      "epoch": 0.10815682739972961,
      "grad_norm": 29.180490493774414,
      "learning_rate": 4.891696750902527e-05,
      "loss": 0.5173,
      "step": 60
    },
    {
      "epoch": 0.12618296529968454,
      "grad_norm": 33.55237579345703,
      "learning_rate": 4.873646209386282e-05,
      "loss": 0.4931,
      "step": 70
    },
    {
      "epoch": 0.14420910319963948,
      "grad_norm": 32.063228607177734,
      "learning_rate": 4.855595667870036e-05,
      "loss": 0.483,
      "step": 80
    },
    {
      "epoch": 0.16223524109959442,
      "grad_norm": 24.234926223754883,
      "learning_rate": 4.837545126353791e-05,
      "loss": 0.47,
      "step": 90
    },
    {
      "epoch": 0.18026137899954936,
      "grad_norm": 33.66775894165039,
      "learning_rate": 4.819494584837546e-05,
      "loss": 0.4564,
      "step": 100
    },
    {
      "epoch": 0.19828751689950427,
      "grad_norm": 53.322574615478516,
      "learning_rate": 4.8014440433213e-05,
      "loss": 0.5083,
      "step": 110
    },
    {
      "epoch": 0.21631365479945922,
      "grad_norm": 29.443470001220703,
      "learning_rate": 4.783393501805055e-05,
      "loss": 0.463,
      "step": 120
    },
    {
      "epoch": 0.23433979269941416,
      "grad_norm": 33.13877487182617,
      "learning_rate": 4.765342960288809e-05,
      "loss": 0.4626,
      "step": 130
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 25.509536743164062,
      "learning_rate": 4.747292418772563e-05,
      "loss": 0.5113,
      "step": 140
    },
    {
      "epoch": 0.270392068499324,
      "grad_norm": 21.015031814575195,
      "learning_rate": 4.7292418772563177e-05,
      "loss": 0.4611,
      "step": 150
    },
    {
      "epoch": 0.28841820639927895,
      "grad_norm": 28.079387664794922,
      "learning_rate": 4.711191335740072e-05,
      "loss": 0.4783,
      "step": 160
    },
    {
      "epoch": 0.3064443442992339,
      "grad_norm": 19.978599548339844,
      "learning_rate": 4.693140794223827e-05,
      "loss": 0.5026,
      "step": 170
    },
    {
      "epoch": 0.32447048219918884,
      "grad_norm": 29.316265106201172,
      "learning_rate": 4.675090252707581e-05,
      "loss": 0.4906,
      "step": 180
    },
    {
      "epoch": 0.3424966200991438,
      "grad_norm": 18.79282569885254,
      "learning_rate": 4.657039711191336e-05,
      "loss": 0.5016,
      "step": 190
    },
    {
      "epoch": 0.3605227579990987,
      "grad_norm": 18.66801643371582,
      "learning_rate": 4.63898916967509e-05,
      "loss": 0.4502,
      "step": 200
    },
    {
      "epoch": 0.3785488958990536,
      "grad_norm": 27.929332733154297,
      "learning_rate": 4.620938628158845e-05,
      "loss": 0.4302,
      "step": 210
    },
    {
      "epoch": 0.39657503379900855,
      "grad_norm": 20.541879653930664,
      "learning_rate": 4.602888086642599e-05,
      "loss": 0.4496,
      "step": 220
    },
    {
      "epoch": 0.4146011716989635,
      "grad_norm": 19.034873962402344,
      "learning_rate": 4.584837545126354e-05,
      "loss": 0.4541,
      "step": 230
    },
    {
      "epoch": 0.43262730959891843,
      "grad_norm": 32.077945709228516,
      "learning_rate": 4.566787003610109e-05,
      "loss": 0.4562,
      "step": 240
    },
    {
      "epoch": 0.45065344749887337,
      "grad_norm": 25.475997924804688,
      "learning_rate": 4.548736462093863e-05,
      "loss": 0.4456,
      "step": 250
    },
    {
      "epoch": 0.4686795853988283,
      "grad_norm": 17.132720947265625,
      "learning_rate": 4.530685920577618e-05,
      "loss": 0.4629,
      "step": 260
    },
    {
      "epoch": 0.48670572329878325,
      "grad_norm": 22.313941955566406,
      "learning_rate": 4.5126353790613716e-05,
      "loss": 0.4444,
      "step": 270
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 22.373477935791016,
      "learning_rate": 4.494584837545127e-05,
      "loss": 0.4404,
      "step": 280
    },
    {
      "epoch": 0.5227579990986931,
      "grad_norm": 16.72430992126465,
      "learning_rate": 4.4765342960288806e-05,
      "loss": 0.4611,
      "step": 290
    },
    {
      "epoch": 0.540784136998648,
      "grad_norm": 27.037927627563477,
      "learning_rate": 4.458483754512636e-05,
      "loss": 0.4272,
      "step": 300
    },
    {
      "epoch": 0.558810274898603,
      "grad_norm": 31.637985229492188,
      "learning_rate": 4.44043321299639e-05,
      "loss": 0.448,
      "step": 310
    },
    {
      "epoch": 0.5768364127985579,
      "grad_norm": 35.010459899902344,
      "learning_rate": 4.422382671480145e-05,
      "loss": 0.4215,
      "step": 320
    },
    {
      "epoch": 0.5948625506985128,
      "grad_norm": 26.25156021118164,
      "learning_rate": 4.404332129963899e-05,
      "loss": 0.4396,
      "step": 330
    },
    {
      "epoch": 0.6128886885984678,
      "grad_norm": 19.4619083404541,
      "learning_rate": 4.386281588447654e-05,
      "loss": 0.4378,
      "step": 340
    },
    {
      "epoch": 0.6309148264984227,
      "grad_norm": 19.16460609436035,
      "learning_rate": 4.368231046931408e-05,
      "loss": 0.4366,
      "step": 350
    },
    {
      "epoch": 0.6489409643983777,
      "grad_norm": 30.745384216308594,
      "learning_rate": 4.350180505415163e-05,
      "loss": 0.4062,
      "step": 360
    },
    {
      "epoch": 0.6669671022983326,
      "grad_norm": 16.873245239257812,
      "learning_rate": 4.332129963898917e-05,
      "loss": 0.439,
      "step": 370
    },
    {
      "epoch": 0.6849932401982876,
      "grad_norm": 25.771486282348633,
      "learning_rate": 4.314079422382672e-05,
      "loss": 0.4053,
      "step": 380
    },
    {
      "epoch": 0.7030193780982424,
      "grad_norm": 14.48513412475586,
      "learning_rate": 4.296028880866426e-05,
      "loss": 0.4164,
      "step": 390
    },
    {
      "epoch": 0.7210455159981974,
      "grad_norm": 34.993465423583984,
      "learning_rate": 4.277978339350181e-05,
      "loss": 0.4438,
      "step": 400
    },
    {
      "epoch": 0.7390716538981523,
      "grad_norm": 22.576608657836914,
      "learning_rate": 4.259927797833935e-05,
      "loss": 0.4518,
      "step": 410
    },
    {
      "epoch": 0.7570977917981072,
      "grad_norm": 19.088132858276367,
      "learning_rate": 4.24187725631769e-05,
      "loss": 0.4495,
      "step": 420
    },
    {
      "epoch": 0.7751239296980622,
      "grad_norm": 35.2998046875,
      "learning_rate": 4.223826714801444e-05,
      "loss": 0.3897,
      "step": 430
    },
    {
      "epoch": 0.7931500675980171,
      "grad_norm": 20.395572662353516,
      "learning_rate": 4.205776173285199e-05,
      "loss": 0.4818,
      "step": 440
    },
    {
      "epoch": 0.8111762054979721,
      "grad_norm": 21.174386978149414,
      "learning_rate": 4.187725631768953e-05,
      "loss": 0.4103,
      "step": 450
    },
    {
      "epoch": 0.829202343397927,
      "grad_norm": 32.97265625,
      "learning_rate": 4.169675090252708e-05,
      "loss": 0.4036,
      "step": 460
    },
    {
      "epoch": 0.847228481297882,
      "grad_norm": 23.78690528869629,
      "learning_rate": 4.151624548736462e-05,
      "loss": 0.4079,
      "step": 470
    },
    {
      "epoch": 0.8652546191978369,
      "grad_norm": 17.087247848510742,
      "learning_rate": 4.1335740072202167e-05,
      "loss": 0.4371,
      "step": 480
    },
    {
      "epoch": 0.8832807570977917,
      "grad_norm": 13.810160636901855,
      "learning_rate": 4.115523465703972e-05,
      "loss": 0.4249,
      "step": 490
    },
    {
      "epoch": 0.9013068949977467,
      "grad_norm": 20.197996139526367,
      "learning_rate": 4.0974729241877256e-05,
      "loss": 0.3991,
      "step": 500
    },
    {
      "epoch": 0.9193330328977016,
      "grad_norm": 33.1153564453125,
      "learning_rate": 4.079422382671481e-05,
      "loss": 0.3994,
      "step": 510
    },
    {
      "epoch": 0.9373591707976566,
      "grad_norm": 18.20081901550293,
      "learning_rate": 4.0613718411552346e-05,
      "loss": 0.4176,
      "step": 520
    },
    {
      "epoch": 0.9553853086976115,
      "grad_norm": 20.09193992614746,
      "learning_rate": 4.043321299638989e-05,
      "loss": 0.4297,
      "step": 530
    },
    {
      "epoch": 0.9734114465975665,
      "grad_norm": 15.939692497253418,
      "learning_rate": 4.0252707581227436e-05,
      "loss": 0.4309,
      "step": 540
    },
    {
      "epoch": 0.9914375844975214,
      "grad_norm": 24.811601638793945,
      "learning_rate": 4.007220216606498e-05,
      "loss": 0.3908,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8367177786987153,
      "eval_loss": 0.37827184796333313,
      "eval_precision": 0.7951949586451359,
      "eval_recall": 0.8828159160472234,
      "eval_runtime": 15.9582,
      "eval_samples_per_second": 292.827,
      "eval_steps_per_second": 36.658,
      "step": 555
    },
    {
      "epoch": 1.0090130689499774,
      "grad_norm": 16.960424423217773,
      "learning_rate": 3.989169675090253e-05,
      "loss": 0.3719,
      "step": 560
    },
    {
      "epoch": 1.0270392068499323,
      "grad_norm": 15.816173553466797,
      "learning_rate": 3.971119133574007e-05,
      "loss": 0.3238,
      "step": 570
    },
    {
      "epoch": 1.0450653447498874,
      "grad_norm": 19.375043869018555,
      "learning_rate": 3.953068592057762e-05,
      "loss": 0.3516,
      "step": 580
    },
    {
      "epoch": 1.0630914826498423,
      "grad_norm": 36.740943908691406,
      "learning_rate": 3.935018050541516e-05,
      "loss": 0.3329,
      "step": 590
    },
    {
      "epoch": 1.0811176205497972,
      "grad_norm": 23.23931884765625,
      "learning_rate": 3.916967509025271e-05,
      "loss": 0.345,
      "step": 600
    },
    {
      "epoch": 1.099143758449752,
      "grad_norm": 23.009553909301758,
      "learning_rate": 3.898916967509025e-05,
      "loss": 0.3054,
      "step": 610
    },
    {
      "epoch": 1.117169896349707,
      "grad_norm": 18.865903854370117,
      "learning_rate": 3.88086642599278e-05,
      "loss": 0.3236,
      "step": 620
    },
    {
      "epoch": 1.135196034249662,
      "grad_norm": 21.043642044067383,
      "learning_rate": 3.862815884476535e-05,
      "loss": 0.3092,
      "step": 630
    },
    {
      "epoch": 1.153222172149617,
      "grad_norm": 22.50505256652832,
      "learning_rate": 3.844765342960289e-05,
      "loss": 0.3274,
      "step": 640
    },
    {
      "epoch": 1.1712483100495719,
      "grad_norm": 20.4163818359375,
      "learning_rate": 3.826714801444044e-05,
      "loss": 0.2459,
      "step": 650
    },
    {
      "epoch": 1.1892744479495267,
      "grad_norm": 21.210535049438477,
      "learning_rate": 3.8086642599277976e-05,
      "loss": 0.3365,
      "step": 660
    },
    {
      "epoch": 1.2073005858494819,
      "grad_norm": 21.084487915039062,
      "learning_rate": 3.790613718411553e-05,
      "loss": 0.3242,
      "step": 670
    },
    {
      "epoch": 1.2253267237494367,
      "grad_norm": 22.721454620361328,
      "learning_rate": 3.7725631768953066e-05,
      "loss": 0.2844,
      "step": 680
    },
    {
      "epoch": 1.2433528616493916,
      "grad_norm": 49.28988265991211,
      "learning_rate": 3.754512635379062e-05,
      "loss": 0.3247,
      "step": 690
    },
    {
      "epoch": 1.2613789995493465,
      "grad_norm": 48.35593795776367,
      "learning_rate": 3.7364620938628155e-05,
      "loss": 0.3217,
      "step": 700
    },
    {
      "epoch": 1.2794051374493014,
      "grad_norm": 30.997228622436523,
      "learning_rate": 3.718411552346571e-05,
      "loss": 0.2854,
      "step": 710
    },
    {
      "epoch": 1.2974312753492563,
      "grad_norm": 35.57414627075195,
      "learning_rate": 3.700361010830325e-05,
      "loss": 0.3315,
      "step": 720
    },
    {
      "epoch": 1.3154574132492114,
      "grad_norm": 27.17759895324707,
      "learning_rate": 3.68231046931408e-05,
      "loss": 0.2965,
      "step": 730
    },
    {
      "epoch": 1.3334835511491663,
      "grad_norm": 31.296627044677734,
      "learning_rate": 3.664259927797834e-05,
      "loss": 0.3396,
      "step": 740
    },
    {
      "epoch": 1.3515096890491212,
      "grad_norm": 18.639175415039062,
      "learning_rate": 3.646209386281589e-05,
      "loss": 0.3178,
      "step": 750
    },
    {
      "epoch": 1.3695358269490763,
      "grad_norm": 31.972665786743164,
      "learning_rate": 3.628158844765343e-05,
      "loss": 0.2927,
      "step": 760
    },
    {
      "epoch": 1.3875619648490312,
      "grad_norm": 14.907872200012207,
      "learning_rate": 3.610108303249098e-05,
      "loss": 0.3391,
      "step": 770
    },
    {
      "epoch": 1.405588102748986,
      "grad_norm": 33.58858871459961,
      "learning_rate": 3.592057761732852e-05,
      "loss": 0.3111,
      "step": 780
    },
    {
      "epoch": 1.423614240648941,
      "grad_norm": 17.019819259643555,
      "learning_rate": 3.574007220216607e-05,
      "loss": 0.3401,
      "step": 790
    },
    {
      "epoch": 1.4416403785488958,
      "grad_norm": 27.324296951293945,
      "learning_rate": 3.555956678700361e-05,
      "loss": 0.2917,
      "step": 800
    },
    {
      "epoch": 1.4596665164488507,
      "grad_norm": 30.170120239257812,
      "learning_rate": 3.537906137184116e-05,
      "loss": 0.2871,
      "step": 810
    },
    {
      "epoch": 1.4776926543488058,
      "grad_norm": 44.26614761352539,
      "learning_rate": 3.51985559566787e-05,
      "loss": 0.3307,
      "step": 820
    },
    {
      "epoch": 1.4957187922487607,
      "grad_norm": 28.978620529174805,
      "learning_rate": 3.5018050541516247e-05,
      "loss": 0.3207,
      "step": 830
    },
    {
      "epoch": 1.5137449301487156,
      "grad_norm": 22.961040496826172,
      "learning_rate": 3.483754512635379e-05,
      "loss": 0.3075,
      "step": 840
    },
    {
      "epoch": 1.5317710680486707,
      "grad_norm": 22.511327743530273,
      "learning_rate": 3.4657039711191336e-05,
      "loss": 0.3035,
      "step": 850
    },
    {
      "epoch": 1.5497972059486256,
      "grad_norm": 33.944190979003906,
      "learning_rate": 3.447653429602888e-05,
      "loss": 0.3243,
      "step": 860
    },
    {
      "epoch": 1.5678233438485805,
      "grad_norm": 24.72734832763672,
      "learning_rate": 3.4296028880866426e-05,
      "loss": 0.3403,
      "step": 870
    },
    {
      "epoch": 1.5858494817485354,
      "grad_norm": 31.211387634277344,
      "learning_rate": 3.411552346570397e-05,
      "loss": 0.3142,
      "step": 880
    },
    {
      "epoch": 1.6038756196484902,
      "grad_norm": 23.974918365478516,
      "learning_rate": 3.3935018050541516e-05,
      "loss": 0.3015,
      "step": 890
    },
    {
      "epoch": 1.6219017575484451,
      "grad_norm": 30.705829620361328,
      "learning_rate": 3.375451263537907e-05,
      "loss": 0.3529,
      "step": 900
    },
    {
      "epoch": 1.6399278954484002,
      "grad_norm": 20.697528839111328,
      "learning_rate": 3.3574007220216606e-05,
      "loss": 0.302,
      "step": 910
    },
    {
      "epoch": 1.6579540333483551,
      "grad_norm": 28.308101654052734,
      "learning_rate": 3.339350180505416e-05,
      "loss": 0.2735,
      "step": 920
    },
    {
      "epoch": 1.67598017124831,
      "grad_norm": 22.28900909423828,
      "learning_rate": 3.3212996389891696e-05,
      "loss": 0.3194,
      "step": 930
    },
    {
      "epoch": 1.694006309148265,
      "grad_norm": 29.04107666015625,
      "learning_rate": 3.303249097472924e-05,
      "loss": 0.3265,
      "step": 940
    },
    {
      "epoch": 1.71203244704822,
      "grad_norm": 40.08032989501953,
      "learning_rate": 3.2851985559566786e-05,
      "loss": 0.3331,
      "step": 950
    },
    {
      "epoch": 1.7300585849481749,
      "grad_norm": 29.40334129333496,
      "learning_rate": 3.267148014440433e-05,
      "loss": 0.2664,
      "step": 960
    },
    {
      "epoch": 1.7480847228481298,
      "grad_norm": 33.51088333129883,
      "learning_rate": 3.249097472924188e-05,
      "loss": 0.3031,
      "step": 970
    },
    {
      "epoch": 1.7661108607480847,
      "grad_norm": 27.003469467163086,
      "learning_rate": 3.231046931407942e-05,
      "loss": 0.3195,
      "step": 980
    },
    {
      "epoch": 1.7841369986480395,
      "grad_norm": 26.585309982299805,
      "learning_rate": 3.212996389891697e-05,
      "loss": 0.3385,
      "step": 990
    },
    {
      "epoch": 1.8021631365479944,
      "grad_norm": 36.866294860839844,
      "learning_rate": 3.194945848375451e-05,
      "loss": 0.3029,
      "step": 1000
    },
    {
      "epoch": 1.8201892744479495,
      "grad_norm": 44.910362243652344,
      "learning_rate": 3.176895306859206e-05,
      "loss": 0.3239,
      "step": 1010
    },
    {
      "epoch": 1.8382154123479044,
      "grad_norm": 20.13945198059082,
      "learning_rate": 3.15884476534296e-05,
      "loss": 0.3071,
      "step": 1020
    },
    {
      "epoch": 1.8562415502478595,
      "grad_norm": 42.44215774536133,
      "learning_rate": 3.140794223826715e-05,
      "loss": 0.3246,
      "step": 1030
    },
    {
      "epoch": 1.8742676881478144,
      "grad_norm": 31.791410446166992,
      "learning_rate": 3.12274368231047e-05,
      "loss": 0.3273,
      "step": 1040
    },
    {
      "epoch": 1.8922938260477693,
      "grad_norm": 21.244722366333008,
      "learning_rate": 3.104693140794224e-05,
      "loss": 0.3093,
      "step": 1050
    },
    {
      "epoch": 1.9103199639477242,
      "grad_norm": 27.103349685668945,
      "learning_rate": 3.086642599277979e-05,
      "loss": 0.2694,
      "step": 1060
    },
    {
      "epoch": 1.928346101847679,
      "grad_norm": 35.73145294189453,
      "learning_rate": 3.0685920577617325e-05,
      "loss": 0.2934,
      "step": 1070
    },
    {
      "epoch": 1.946372239747634,
      "grad_norm": 19.97008514404297,
      "learning_rate": 3.0505415162454877e-05,
      "loss": 0.3493,
      "step": 1080
    },
    {
      "epoch": 1.9643983776475888,
      "grad_norm": 23.084754943847656,
      "learning_rate": 3.032490974729242e-05,
      "loss": 0.3243,
      "step": 1090
    },
    {
      "epoch": 1.982424515547544,
      "grad_norm": 29.515155792236328,
      "learning_rate": 3.0144404332129967e-05,
      "loss": 0.3235,
      "step": 1100
    },
    {
      "epoch": 2.0,
      "grad_norm": 24.17915916442871,
      "learning_rate": 2.996389891696751e-05,
      "loss": 0.2751,
      "step": 1110
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.8428896708755951,
      "eval_loss": 0.3940623104572296,
      "eval_precision": 0.800314465408805,
      "eval_recall": 0.890249234805422,
      "eval_runtime": 12.3989,
      "eval_samples_per_second": 376.889,
      "eval_steps_per_second": 47.182,
      "step": 1110
    }
  ],
  "logging_steps": 10,
  "max_steps": 2770,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9341494909501440.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}