{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.991437584497522,
  "eval_steps": 500,
  "global_step": 2770,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018026137899954935,
      "grad_norm": 13.032843589782715,
      "learning_rate": 4.981949458483755e-05,
      "loss": 0.6633,
      "step": 10
    },
    {
      "epoch": 0.03605227579990987,
      "grad_norm": 35.63669204711914,
      "learning_rate": 4.963898916967509e-05,
      "loss": 0.5651,
      "step": 20
    },
    {
      "epoch": 0.054078413699864804,
      "grad_norm": 16.95796012878418,
      "learning_rate": 4.945848375451264e-05,
      "loss": 0.5359,
      "step": 30
    },
    {
      "epoch": 0.07210455159981974,
      "grad_norm": 14.045697212219238,
      "learning_rate": 4.927797833935018e-05,
      "loss": 0.5421,
      "step": 40
    },
    {
      "epoch": 0.09013068949977468,
      "grad_norm": 43.34794235229492,
      "learning_rate": 4.909747292418773e-05,
      "loss": 0.5222,
      "step": 50
    },
    {
      "epoch": 0.10815682739972961,
      "grad_norm": 32.69561767578125,
      "learning_rate": 4.891696750902527e-05,
      "loss": 0.4589,
      "step": 60
    },
    {
      "epoch": 0.12618296529968454,
      "grad_norm": 29.39051055908203,
      "learning_rate": 4.873646209386282e-05,
      "loss": 0.4858,
      "step": 70
    },
    {
      "epoch": 0.14420910319963948,
      "grad_norm": 30.52906608581543,
      "learning_rate": 4.855595667870036e-05,
      "loss": 0.4418,
      "step": 80
    },
    {
      "epoch": 0.16223524109959442,
      "grad_norm": 15.103797912597656,
      "learning_rate": 4.837545126353791e-05,
      "loss": 0.4713,
      "step": 90
    },
    {
      "epoch": 0.18026137899954936,
      "grad_norm": 40.57420349121094,
      "learning_rate": 4.819494584837546e-05,
      "loss": 0.468,
      "step": 100
    },
    {
      "epoch": 0.19828751689950427,
      "grad_norm": 37.67963409423828,
      "learning_rate": 4.8014440433213e-05,
      "loss": 0.492,
      "step": 110
    },
    {
      "epoch": 0.21631365479945922,
      "grad_norm": 30.945823669433594,
      "learning_rate": 4.783393501805055e-05,
      "loss": 0.4633,
      "step": 120
    },
    {
      "epoch": 0.23433979269941416,
      "grad_norm": 40.709590911865234,
      "learning_rate": 4.765342960288809e-05,
      "loss": 0.4392,
      "step": 130
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 19.698150634765625,
      "learning_rate": 4.747292418772563e-05,
      "loss": 0.4707,
      "step": 140
    },
    {
      "epoch": 0.270392068499324,
      "grad_norm": 21.289947509765625,
      "learning_rate": 4.7292418772563177e-05,
      "loss": 0.4576,
      "step": 150
    },
    {
      "epoch": 0.28841820639927895,
      "grad_norm": 26.911935806274414,
      "learning_rate": 4.711191335740072e-05,
      "loss": 0.4583,
      "step": 160
    },
    {
      "epoch": 0.3064443442992339,
      "grad_norm": 19.308853149414062,
      "learning_rate": 4.693140794223827e-05,
      "loss": 0.4937,
      "step": 170
    },
    {
      "epoch": 0.32447048219918884,
      "grad_norm": 34.61503219604492,
      "learning_rate": 4.675090252707581e-05,
      "loss": 0.4836,
      "step": 180
    },
    {
      "epoch": 0.3424966200991438,
      "grad_norm": 16.551424026489258,
      "learning_rate": 4.657039711191336e-05,
      "loss": 0.4919,
      "step": 190
    },
    {
      "epoch": 0.3605227579990987,
      "grad_norm": 16.011377334594727,
      "learning_rate": 4.63898916967509e-05,
      "loss": 0.4518,
      "step": 200
    },
    {
      "epoch": 0.3785488958990536,
      "grad_norm": 22.71525764465332,
      "learning_rate": 4.620938628158845e-05,
      "loss": 0.4221,
      "step": 210
    },
    {
      "epoch": 0.39657503379900855,
      "grad_norm": 22.2515811920166,
      "learning_rate": 4.602888086642599e-05,
      "loss": 0.4286,
      "step": 220
    },
    {
      "epoch": 0.4146011716989635,
      "grad_norm": 18.233482360839844,
      "learning_rate": 4.584837545126354e-05,
      "loss": 0.4442,
      "step": 230
    },
    {
      "epoch": 0.43262730959891843,
      "grad_norm": 33.190948486328125,
      "learning_rate": 4.566787003610109e-05,
      "loss": 0.4584,
      "step": 240
    },
    {
      "epoch": 0.45065344749887337,
      "grad_norm": 29.945205688476562,
      "learning_rate": 4.548736462093863e-05,
      "loss": 0.4339,
      "step": 250
    },
    {
      "epoch": 0.4686795853988283,
      "grad_norm": 17.004419326782227,
      "learning_rate": 4.530685920577618e-05,
      "loss": 0.4504,
      "step": 260
    },
    {
      "epoch": 0.48670572329878325,
      "grad_norm": 23.36432647705078,
      "learning_rate": 4.5126353790613716e-05,
      "loss": 0.4405,
      "step": 270
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 29.156505584716797,
      "learning_rate": 4.494584837545127e-05,
      "loss": 0.4319,
      "step": 280
    },
    {
      "epoch": 0.5227579990986931,
      "grad_norm": 16.330259323120117,
      "learning_rate": 4.4765342960288806e-05,
      "loss": 0.453,
      "step": 290
    },
    {
      "epoch": 0.540784136998648,
      "grad_norm": 21.66246223449707,
      "learning_rate": 4.458483754512636e-05,
      "loss": 0.416,
      "step": 300
    },
    {
      "epoch": 0.558810274898603,
      "grad_norm": 22.177268981933594,
      "learning_rate": 4.44043321299639e-05,
      "loss": 0.4456,
      "step": 310
    },
    {
      "epoch": 0.5768364127985579,
      "grad_norm": 38.450069427490234,
      "learning_rate": 4.422382671480145e-05,
      "loss": 0.4216,
      "step": 320
    },
    {
      "epoch": 0.5948625506985128,
      "grad_norm": 28.213655471801758,
      "learning_rate": 4.404332129963899e-05,
      "loss": 0.4392,
      "step": 330
    },
    {
      "epoch": 0.6128886885984678,
      "grad_norm": 19.354459762573242,
      "learning_rate": 4.386281588447654e-05,
      "loss": 0.4303,
      "step": 340
    },
    {
      "epoch": 0.6309148264984227,
      "grad_norm": 20.212663650512695,
      "learning_rate": 4.368231046931408e-05,
      "loss": 0.4426,
      "step": 350
    },
    {
      "epoch": 0.6489409643983777,
      "grad_norm": 34.96459197998047,
      "learning_rate": 4.350180505415163e-05,
      "loss": 0.4146,
      "step": 360
    },
    {
      "epoch": 0.6669671022983326,
      "grad_norm": 17.65025520324707,
      "learning_rate": 4.332129963898917e-05,
      "loss": 0.4506,
      "step": 370
    },
    {
      "epoch": 0.6849932401982876,
      "grad_norm": 18.796554565429688,
      "learning_rate": 4.314079422382672e-05,
      "loss": 0.3765,
      "step": 380
    },
    {
      "epoch": 0.7030193780982424,
      "grad_norm": 21.690357208251953,
      "learning_rate": 4.296028880866426e-05,
      "loss": 0.4158,
      "step": 390
    },
    {
      "epoch": 0.7210455159981974,
      "grad_norm": 39.8782844543457,
      "learning_rate": 4.277978339350181e-05,
      "loss": 0.4602,
      "step": 400
    },
    {
      "epoch": 0.7390716538981523,
      "grad_norm": 26.3798885345459,
      "learning_rate": 4.259927797833935e-05,
      "loss": 0.4512,
      "step": 410
    },
    {
      "epoch": 0.7570977917981072,
      "grad_norm": 18.073617935180664,
      "learning_rate": 4.24187725631769e-05,
      "loss": 0.4347,
      "step": 420
    },
    {
      "epoch": 0.7751239296980622,
      "grad_norm": 30.09633445739746,
      "learning_rate": 4.223826714801444e-05,
      "loss": 0.3858,
      "step": 430
    },
    {
      "epoch": 0.7931500675980171,
      "grad_norm": 17.02672576904297,
      "learning_rate": 4.205776173285199e-05,
      "loss": 0.4815,
      "step": 440
    },
    {
      "epoch": 0.8111762054979721,
      "grad_norm": 19.149808883666992,
      "learning_rate": 4.187725631768953e-05,
      "loss": 0.4196,
      "step": 450
    },
    {
      "epoch": 0.829202343397927,
      "grad_norm": 28.140705108642578,
      "learning_rate": 4.169675090252708e-05,
      "loss": 0.4085,
      "step": 460
    },
    {
      "epoch": 0.847228481297882,
      "grad_norm": 21.819814682006836,
      "learning_rate": 4.151624548736462e-05,
      "loss": 0.3986,
      "step": 470
    },
    {
      "epoch": 0.8652546191978369,
      "grad_norm": 15.050581932067871,
      "learning_rate": 4.1335740072202167e-05,
      "loss": 0.4267,
      "step": 480
    },
    {
      "epoch": 0.8832807570977917,
      "grad_norm": 14.929478645324707,
      "learning_rate": 4.115523465703972e-05,
      "loss": 0.4315,
      "step": 490
    },
    {
      "epoch": 0.9013068949977467,
      "grad_norm": 20.087432861328125,
      "learning_rate": 4.0974729241877256e-05,
      "loss": 0.3886,
      "step": 500
    },
    {
      "epoch": 0.9193330328977016,
      "grad_norm": 32.96128463745117,
      "learning_rate": 4.079422382671481e-05,
      "loss": 0.4046,
      "step": 510
    },
    {
      "epoch": 0.9373591707976566,
      "grad_norm": 19.673940658569336,
      "learning_rate": 4.0613718411552346e-05,
      "loss": 0.4127,
      "step": 520
    },
    {
      "epoch": 0.9553853086976115,
      "grad_norm": 16.12790298461914,
      "learning_rate": 4.043321299638989e-05,
      "loss": 0.4227,
      "step": 530
    },
    {
      "epoch": 0.9734114465975665,
      "grad_norm": 20.4881649017334,
      "learning_rate": 4.0252707581227436e-05,
      "loss": 0.4223,
      "step": 540
    },
    {
      "epoch": 0.9914375844975214,
      "grad_norm": 17.96915054321289,
      "learning_rate": 4.007220216606498e-05,
      "loss": 0.395,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8363481591828048,
      "eval_fn": 322,
      "eval_fp": 447,
      "eval_loss": 0.372616171836853,
      "eval_precision": 0.8146766169154229,
      "eval_recall": 0.8592041976388282,
      "eval_runtime": 12.0589,
      "eval_samples_per_second": 387.513,
      "eval_steps_per_second": 48.512,
      "eval_tn": 1939,
      "eval_tp": 1965,
      "step": 555
    },
    {
      "epoch": 1.0090130689499774,
      "grad_norm": 16.42650604248047,
      "learning_rate": 3.989169675090253e-05,
      "loss": 0.3725,
      "step": 560
    },
    {
      "epoch": 1.0270392068499323,
      "grad_norm": 14.740382194519043,
      "learning_rate": 3.971119133574007e-05,
      "loss": 0.3028,
      "step": 570
    },
    {
      "epoch": 1.0450653447498874,
      "grad_norm": 21.919588088989258,
      "learning_rate": 3.953068592057762e-05,
      "loss": 0.3371,
      "step": 580
    },
    {
      "epoch": 1.0630914826498423,
      "grad_norm": 23.157644271850586,
      "learning_rate": 3.935018050541516e-05,
      "loss": 0.3098,
      "step": 590
    },
    {
      "epoch": 1.0811176205497972,
      "grad_norm": 19.175317764282227,
      "learning_rate": 3.916967509025271e-05,
      "loss": 0.3394,
      "step": 600
    },
    {
      "epoch": 1.099143758449752,
      "grad_norm": 24.65229034423828,
      "learning_rate": 3.898916967509025e-05,
      "loss": 0.3204,
      "step": 610
    },
    {
      "epoch": 1.117169896349707,
      "grad_norm": 20.54008674621582,
      "learning_rate": 3.88086642599278e-05,
      "loss": 0.2994,
      "step": 620
    },
    {
      "epoch": 1.135196034249662,
      "grad_norm": 24.291664123535156,
      "learning_rate": 3.862815884476535e-05,
      "loss": 0.3038,
      "step": 630
    },
    {
      "epoch": 1.153222172149617,
      "grad_norm": 21.287853240966797,
      "learning_rate": 3.844765342960289e-05,
      "loss": 0.3118,
      "step": 640
    },
    {
      "epoch": 1.1712483100495719,
      "grad_norm": 19.468006134033203,
      "learning_rate": 3.826714801444044e-05,
      "loss": 0.2484,
      "step": 650
    },
    {
      "epoch": 1.1892744479495267,
      "grad_norm": 26.11173439025879,
      "learning_rate": 3.8086642599277976e-05,
      "loss": 0.308,
      "step": 660
    },
    {
      "epoch": 1.2073005858494819,
      "grad_norm": 24.100196838378906,
      "learning_rate": 3.790613718411553e-05,
      "loss": 0.3032,
      "step": 670
    },
    {
      "epoch": 1.2253267237494367,
      "grad_norm": 23.543447494506836,
      "learning_rate": 3.7725631768953066e-05,
      "loss": 0.2704,
      "step": 680
    },
    {
      "epoch": 1.2433528616493916,
      "grad_norm": 34.42282485961914,
      "learning_rate": 3.754512635379062e-05,
      "loss": 0.3207,
      "step": 690
    },
    {
      "epoch": 1.2613789995493465,
      "grad_norm": 52.59242630004883,
      "learning_rate": 3.7364620938628155e-05,
      "loss": 0.303,
      "step": 700
    },
    {
      "epoch": 1.2794051374493014,
      "grad_norm": 28.380693435668945,
      "learning_rate": 3.718411552346571e-05,
      "loss": 0.2782,
      "step": 710
    },
    {
      "epoch": 1.2974312753492563,
      "grad_norm": 30.861343383789062,
      "learning_rate": 3.700361010830325e-05,
      "loss": 0.3218,
      "step": 720
    },
    {
      "epoch": 1.3154574132492114,
      "grad_norm": 24.913606643676758,
      "learning_rate": 3.68231046931408e-05,
      "loss": 0.2904,
      "step": 730
    },
    {
      "epoch": 1.3334835511491663,
      "grad_norm": 32.816200256347656,
      "learning_rate": 3.664259927797834e-05,
      "loss": 0.31,
      "step": 740
    },
    {
      "epoch": 1.3515096890491212,
      "grad_norm": 21.931764602661133,
      "learning_rate": 3.646209386281589e-05,
      "loss": 0.2986,
      "step": 750
    },
    {
      "epoch": 1.3695358269490763,
      "grad_norm": 18.701095581054688,
      "learning_rate": 3.628158844765343e-05,
      "loss": 0.2841,
      "step": 760
    },
    {
      "epoch": 1.3875619648490312,
      "grad_norm": 18.000774383544922,
      "learning_rate": 3.610108303249098e-05,
      "loss": 0.3354,
      "step": 770
    },
    {
      "epoch": 1.405588102748986,
      "grad_norm": 37.74651336669922,
      "learning_rate": 3.592057761732852e-05,
      "loss": 0.3101,
      "step": 780
    },
    {
      "epoch": 1.423614240648941,
      "grad_norm": 20.058916091918945,
      "learning_rate": 3.574007220216607e-05,
      "loss": 0.3324,
      "step": 790
    },
    {
      "epoch": 1.4416403785488958,
      "grad_norm": 25.672412872314453,
      "learning_rate": 3.555956678700361e-05,
      "loss": 0.2853,
      "step": 800
    },
    {
      "epoch": 1.4596665164488507,
      "grad_norm": 25.06855010986328,
      "learning_rate": 3.537906137184116e-05,
      "loss": 0.2913,
      "step": 810
    },
    {
      "epoch": 1.4776926543488058,
      "grad_norm": 28.99418067932129,
      "learning_rate": 3.51985559566787e-05,
      "loss": 0.3055,
      "step": 820
    },
    {
      "epoch": 1.4957187922487607,
      "grad_norm": 31.89482307434082,
      "learning_rate": 3.5018050541516247e-05,
      "loss": 0.3102,
      "step": 830
    },
    {
      "epoch": 1.5137449301487156,
      "grad_norm": 19.116968154907227,
      "learning_rate": 3.483754512635379e-05,
      "loss": 0.3079,
      "step": 840
    },
    {
      "epoch": 1.5317710680486707,
      "grad_norm": 17.446434020996094,
      "learning_rate": 3.4657039711191336e-05,
      "loss": 0.2972,
      "step": 850
    },
    {
      "epoch": 1.5497972059486256,
      "grad_norm": 24.87527847290039,
      "learning_rate": 3.447653429602888e-05,
      "loss": 0.3148,
      "step": 860
    },
    {
      "epoch": 1.5678233438485805,
      "grad_norm": 27.43445587158203,
      "learning_rate": 3.4296028880866426e-05,
      "loss": 0.3321,
      "step": 870
    },
    {
      "epoch": 1.5858494817485354,
      "grad_norm": 29.606460571289062,
      "learning_rate": 3.411552346570397e-05,
      "loss": 0.3132,
      "step": 880
    },
    {
      "epoch": 1.6038756196484902,
      "grad_norm": 26.478710174560547,
      "learning_rate": 3.3935018050541516e-05,
      "loss": 0.2844,
      "step": 890
    },
    {
      "epoch": 1.6219017575484451,
      "grad_norm": 39.11272048950195,
      "learning_rate": 3.375451263537907e-05,
      "loss": 0.3578,
      "step": 900
    },
    {
      "epoch": 1.6399278954484002,
      "grad_norm": 25.545093536376953,
      "learning_rate": 3.3574007220216606e-05,
      "loss": 0.2977,
      "step": 910
    },
    {
      "epoch": 1.6579540333483551,
      "grad_norm": 32.68045425415039,
      "learning_rate": 3.339350180505416e-05,
      "loss": 0.2662,
      "step": 920
    },
    {
      "epoch": 1.67598017124831,
      "grad_norm": 21.527555465698242,
      "learning_rate": 3.3212996389891696e-05,
      "loss": 0.3121,
      "step": 930
    },
    {
      "epoch": 1.694006309148265,
      "grad_norm": 25.452423095703125,
      "learning_rate": 3.303249097472924e-05,
      "loss": 0.3243,
      "step": 940
    },
    {
      "epoch": 1.71203244704822,
      "grad_norm": 36.91832733154297,
      "learning_rate": 3.2851985559566786e-05,
      "loss": 0.3199,
      "step": 950
    },
    {
      "epoch": 1.7300585849481749,
      "grad_norm": 29.058032989501953,
      "learning_rate": 3.267148014440433e-05,
      "loss": 0.2745,
      "step": 960
    },
    {
      "epoch": 1.7480847228481298,
      "grad_norm": 34.39455032348633,
      "learning_rate": 3.249097472924188e-05,
      "loss": 0.294,
      "step": 970
    },
    {
      "epoch": 1.7661108607480847,
      "grad_norm": 32.51374053955078,
      "learning_rate": 3.231046931407942e-05,
      "loss": 0.2994,
      "step": 980
    },
    {
      "epoch": 1.7841369986480395,
      "grad_norm": 22.213062286376953,
      "learning_rate": 3.212996389891697e-05,
      "loss": 0.3296,
      "step": 990
    },
    {
      "epoch": 1.8021631365479944,
      "grad_norm": 26.40260887145996,
      "learning_rate": 3.194945848375451e-05,
      "loss": 0.2876,
      "step": 1000
    },
    {
      "epoch": 1.8201892744479495,
      "grad_norm": 26.672101974487305,
      "learning_rate": 3.176895306859206e-05,
      "loss": 0.3172,
      "step": 1010
    },
    {
      "epoch": 1.8382154123479044,
      "grad_norm": 20.305883407592773,
      "learning_rate": 3.15884476534296e-05,
      "loss": 0.3199,
      "step": 1020
    },
    {
      "epoch": 1.8562415502478595,
      "grad_norm": 33.50856018066406,
      "learning_rate": 3.140794223826715e-05,
      "loss": 0.2886,
      "step": 1030
    },
    {
      "epoch": 1.8742676881478144,
      "grad_norm": 38.9220085144043,
      "learning_rate": 3.12274368231047e-05,
      "loss": 0.3198,
      "step": 1040
    },
    {
      "epoch": 1.8922938260477693,
      "grad_norm": 31.592369079589844,
      "learning_rate": 3.104693140794224e-05,
      "loss": 0.3174,
      "step": 1050
    },
    {
      "epoch": 1.9103199639477242,
      "grad_norm": 23.867713928222656,
      "learning_rate": 3.086642599277979e-05,
      "loss": 0.2686,
      "step": 1060
    },
    {
      "epoch": 1.928346101847679,
      "grad_norm": 30.095354080200195,
      "learning_rate": 3.0685920577617325e-05,
      "loss": 0.2915,
      "step": 1070
    },
    {
      "epoch": 1.946372239747634,
      "grad_norm": 21.510950088500977,
      "learning_rate": 3.0505415162454877e-05,
      "loss": 0.3328,
      "step": 1080
    },
    {
      "epoch": 1.9643983776475888,
      "grad_norm": 25.863767623901367,
      "learning_rate": 3.032490974729242e-05,
      "loss": 0.3207,
      "step": 1090
    },
    {
      "epoch": 1.982424515547544,
      "grad_norm": 28.48052978515625,
      "learning_rate": 3.0144404332129967e-05,
      "loss": 0.3221,
      "step": 1100
    },
    {
      "epoch": 2.0,
      "grad_norm": 29.120187759399414,
      "learning_rate": 2.996389891696751e-05,
      "loss": 0.2667,
      "step": 1110
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.8394543546694648,
      "eval_fn": 287,
      "eval_fp": 478,
      "eval_loss": 0.3905850350856781,
      "eval_precision": 0.8071025020177562,
      "eval_recall": 0.8745080891998251,
      "eval_runtime": 17.6994,
      "eval_samples_per_second": 264.02,
      "eval_steps_per_second": 33.052,
      "eval_tn": 1908,
      "eval_tp": 2000,
      "step": 1110
    },
    {
      "epoch": 2.018026137899955,
      "grad_norm": 17.400802612304688,
      "learning_rate": 2.9783393501805057e-05,
      "loss": 0.1682,
      "step": 1120
    },
    {
      "epoch": 2.0360522757999098,
      "grad_norm": 18.69659423828125,
      "learning_rate": 2.9602888086642598e-05,
      "loss": 0.1222,
      "step": 1130
    },
    {
      "epoch": 2.0540784136998647,
      "grad_norm": 50.107913970947266,
      "learning_rate": 2.9422382671480147e-05,
      "loss": 0.1668,
      "step": 1140
    },
    {
      "epoch": 2.0721045515998195,
      "grad_norm": 46.38024139404297,
      "learning_rate": 2.924187725631769e-05,
      "loss": 0.1498,
      "step": 1150
    },
    {
      "epoch": 2.090130689499775,
      "grad_norm": 25.969005584716797,
      "learning_rate": 2.906137184115524e-05,
      "loss": 0.159,
      "step": 1160
    },
    {
      "epoch": 2.1081568273997298,
      "grad_norm": 39.98933792114258,
      "learning_rate": 2.888086642599278e-05,
      "loss": 0.1309,
      "step": 1170
    },
    {
      "epoch": 2.1261829652996846,
      "grad_norm": 52.10431671142578,
      "learning_rate": 2.870036101083033e-05,
      "loss": 0.1667,
      "step": 1180
    },
    {
      "epoch": 2.1442091031996395,
      "grad_norm": 40.3720703125,
      "learning_rate": 2.851985559566787e-05,
      "loss": 0.1893,
      "step": 1190
    },
    {
      "epoch": 2.1622352410995944,
      "grad_norm": 38.03019332885742,
      "learning_rate": 2.8339350180505413e-05,
      "loss": 0.1565,
      "step": 1200
    },
    {
      "epoch": 2.1802613789995493,
      "grad_norm": 45.700809478759766,
      "learning_rate": 2.815884476534296e-05,
      "loss": 0.1461,
      "step": 1210
    },
    {
      "epoch": 2.198287516899504,
      "grad_norm": 37.712379455566406,
      "learning_rate": 2.7978339350180506e-05,
      "loss": 0.1561,
      "step": 1220
    },
    {
      "epoch": 2.216313654799459,
      "grad_norm": 33.16317367553711,
      "learning_rate": 2.779783393501805e-05,
      "loss": 0.1484,
      "step": 1230
    },
    {
      "epoch": 2.234339792699414,
      "grad_norm": 19.943294525146484,
      "learning_rate": 2.7617328519855596e-05,
      "loss": 0.1659,
      "step": 1240
    },
    {
      "epoch": 2.2523659305993693,
      "grad_norm": 20.473445892333984,
      "learning_rate": 2.7436823104693144e-05,
      "loss": 0.1515,
      "step": 1250
    },
    {
      "epoch": 2.270392068499324,
      "grad_norm": 38.28823471069336,
      "learning_rate": 2.7256317689530686e-05,
      "loss": 0.1856,
      "step": 1260
    },
    {
      "epoch": 2.288418206399279,
      "grad_norm": 17.913368225097656,
      "learning_rate": 2.7075812274368234e-05,
      "loss": 0.1326,
      "step": 1270
    },
    {
      "epoch": 2.306444344299234,
      "grad_norm": 20.06118392944336,
      "learning_rate": 2.6895306859205776e-05,
      "loss": 0.1426,
      "step": 1280
    },
    {
      "epoch": 2.324470482199189,
      "grad_norm": 30.24198341369629,
      "learning_rate": 2.6714801444043324e-05,
      "loss": 0.1237,
      "step": 1290
    },
    {
      "epoch": 2.3424966200991437,
      "grad_norm": 25.373851776123047,
      "learning_rate": 2.6534296028880866e-05,
      "loss": 0.1321,
      "step": 1300
    },
    {
      "epoch": 2.3605227579990986,
      "grad_norm": 61.367210388183594,
      "learning_rate": 2.6353790613718414e-05,
      "loss": 0.1628,
      "step": 1310
    },
    {
      "epoch": 2.3785488958990535,
      "grad_norm": 41.69776916503906,
      "learning_rate": 2.617328519855596e-05,
      "loss": 0.1245,
      "step": 1320
    },
    {
      "epoch": 2.3965750337990084,
      "grad_norm": 42.08649826049805,
      "learning_rate": 2.59927797833935e-05,
      "loss": 0.1612,
      "step": 1330
    },
    {
      "epoch": 2.4146011716989637,
      "grad_norm": 22.838258743286133,
      "learning_rate": 2.581227436823105e-05,
      "loss": 0.1373,
      "step": 1340
    },
    {
      "epoch": 2.4326273095989186,
      "grad_norm": 52.76067352294922,
      "learning_rate": 2.563176895306859e-05,
      "loss": 0.1511,
      "step": 1350
    },
    {
      "epoch": 2.4506534474988735,
      "grad_norm": 50.871219635009766,
      "learning_rate": 2.545126353790614e-05,
      "loss": 0.1777,
      "step": 1360
    },
    {
      "epoch": 2.4686795853988284,
      "grad_norm": 19.779335021972656,
      "learning_rate": 2.527075812274368e-05,
      "loss": 0.1778,
      "step": 1370
    },
    {
      "epoch": 2.4867057232987833,
      "grad_norm": 40.787845611572266,
      "learning_rate": 2.509025270758123e-05,
      "loss": 0.1433,
      "step": 1380
    },
    {
      "epoch": 2.504731861198738,
      "grad_norm": 39.423404693603516,
      "learning_rate": 2.4909747292418774e-05,
      "loss": 0.1372,
      "step": 1390
    },
    {
      "epoch": 2.522757999098693,
      "grad_norm": 46.86770248413086,
      "learning_rate": 2.472924187725632e-05,
      "loss": 0.2135,
      "step": 1400
    },
    {
      "epoch": 2.540784136998648,
      "grad_norm": 17.858736038208008,
      "learning_rate": 2.4548736462093864e-05,
      "loss": 0.1631,
      "step": 1410
    },
    {
      "epoch": 2.558810274898603,
      "grad_norm": 36.45213317871094,
      "learning_rate": 2.436823104693141e-05,
      "loss": 0.1753,
      "step": 1420
    },
    {
      "epoch": 2.576836412798558,
      "grad_norm": 51.208805084228516,
      "learning_rate": 2.4187725631768953e-05,
      "loss": 0.1722,
      "step": 1430
    },
    {
      "epoch": 2.5948625506985126,
      "grad_norm": 20.30636978149414,
      "learning_rate": 2.40072202166065e-05,
      "loss": 0.1516,
      "step": 1440
    },
    {
      "epoch": 2.612888688598468,
      "grad_norm": 57.81936264038086,
      "learning_rate": 2.3826714801444043e-05,
      "loss": 0.1488,
      "step": 1450
    },
    {
      "epoch": 2.630914826498423,
      "grad_norm": 33.060733795166016,
      "learning_rate": 2.3646209386281588e-05,
      "loss": 0.1625,
      "step": 1460
    },
    {
      "epoch": 2.6489409643983777,
      "grad_norm": 54.26327133178711,
      "learning_rate": 2.3465703971119137e-05,
      "loss": 0.1771,
      "step": 1470
    },
    {
      "epoch": 2.6669671022983326,
      "grad_norm": 25.87835693359375,
      "learning_rate": 2.328519855595668e-05,
      "loss": 0.191,
      "step": 1480
    },
    {
      "epoch": 2.6849932401982874,
      "grad_norm": 34.679718017578125,
      "learning_rate": 2.3104693140794227e-05,
      "loss": 0.138,
      "step": 1490
    },
    {
      "epoch": 2.7030193780982423,
      "grad_norm": 44.00432205200195,
      "learning_rate": 2.292418772563177e-05,
      "loss": 0.1652,
      "step": 1500
    },
    {
      "epoch": 2.721045515998197,
      "grad_norm": 11.452496528625488,
      "learning_rate": 2.2743682310469316e-05,
      "loss": 0.1457,
      "step": 1510
    },
    {
      "epoch": 2.7390716538981525,
      "grad_norm": 34.115055084228516,
      "learning_rate": 2.2563176895306858e-05,
      "loss": 0.1607,
      "step": 1520
    },
    {
      "epoch": 2.757097791798107,
      "grad_norm": 37.13041305541992,
      "learning_rate": 2.2382671480144403e-05,
      "loss": 0.1519,
      "step": 1530
    },
    {
      "epoch": 2.7751239296980623,
      "grad_norm": 49.859092712402344,
      "learning_rate": 2.220216606498195e-05,
      "loss": 0.1842,
      "step": 1540
    },
    {
      "epoch": 2.793150067598017,
      "grad_norm": 20.27824592590332,
      "learning_rate": 2.2021660649819496e-05,
      "loss": 0.1352,
      "step": 1550
    },
    {
      "epoch": 2.811176205497972,
      "grad_norm": 54.31693649291992,
      "learning_rate": 2.184115523465704e-05,
      "loss": 0.2065,
      "step": 1560
    },
    {
      "epoch": 2.829202343397927,
      "grad_norm": 53.624446868896484,
      "learning_rate": 2.1660649819494586e-05,
      "loss": 0.167,
      "step": 1570
    },
    {
      "epoch": 2.847228481297882,
      "grad_norm": 22.161457061767578,
      "learning_rate": 2.148014440433213e-05,
      "loss": 0.1425,
      "step": 1580
    },
    {
      "epoch": 2.8652546191978367,
      "grad_norm": 27.219818115234375,
      "learning_rate": 2.1299638989169676e-05,
      "loss": 0.2153,
      "step": 1590
    },
    {
      "epoch": 2.8832807570977916,
      "grad_norm": 32.98075866699219,
      "learning_rate": 2.111913357400722e-05,
      "loss": 0.1473,
      "step": 1600
    },
    {
      "epoch": 2.901306894997747,
      "grad_norm": 71.80169677734375,
      "learning_rate": 2.0938628158844766e-05,
      "loss": 0.1333,
      "step": 1610
    },
    {
      "epoch": 2.9193330328977014,
      "grad_norm": 45.45034408569336,
      "learning_rate": 2.075812274368231e-05,
      "loss": 0.1525,
      "step": 1620
    },
    {
      "epoch": 2.9373591707976567,
      "grad_norm": 36.39427185058594,
      "learning_rate": 2.057761732851986e-05,
      "loss": 0.1315,
      "step": 1630
    },
    {
      "epoch": 2.9553853086976116,
      "grad_norm": 36.360191345214844,
      "learning_rate": 2.0397111913357404e-05,
      "loss": 0.1497,
      "step": 1640
    },
    {
      "epoch": 2.9734114465975665,
      "grad_norm": 45.537322998046875,
      "learning_rate": 2.0216606498194946e-05,
      "loss": 0.1357,
      "step": 1650
    },
    {
      "epoch": 2.9914375844975214,
      "grad_norm": 22.563854217529297,
      "learning_rate": 2.003610108303249e-05,
      "loss": 0.166,
      "step": 1660
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.82996632996633,
      "eval_fn": 315,
      "eval_fp": 493,
      "eval_loss": 0.5492772459983826,
      "eval_precision": 0.8,
      "eval_recall": 0.8622649759510276,
      "eval_runtime": 12.9988,
      "eval_samples_per_second": 359.495,
      "eval_steps_per_second": 45.004,
      "eval_tn": 1893,
      "eval_tp": 1972,
      "step": 1665
    },
    {
      "epoch": 3.0090130689499777,
      "grad_norm": 7.169532299041748,
      "learning_rate": 1.9855595667870036e-05,
      "loss": 0.1283,
      "step": 1670
    },
    {
      "epoch": 3.0270392068499326,
      "grad_norm": 53.082916259765625,
      "learning_rate": 1.967509025270758e-05,
      "loss": 0.0816,
      "step": 1680
    },
    {
      "epoch": 3.0450653447498874,
      "grad_norm": 47.888553619384766,
      "learning_rate": 1.9494584837545125e-05,
      "loss": 0.0742,
      "step": 1690
    },
    {
      "epoch": 3.0630914826498423,
      "grad_norm": 54.369449615478516,
      "learning_rate": 1.9314079422382674e-05,
      "loss": 0.0728,
      "step": 1700
    },
    {
      "epoch": 3.081117620549797,
      "grad_norm": 7.978189945220947,
      "learning_rate": 1.913357400722022e-05,
      "loss": 0.0702,
      "step": 1710
    },
    {
      "epoch": 3.099143758449752,
      "grad_norm": 30.30511474609375,
      "learning_rate": 1.8953068592057764e-05,
      "loss": 0.065,
      "step": 1720
    },
    {
      "epoch": 3.117169896349707,
      "grad_norm": 15.761763572692871,
      "learning_rate": 1.877256317689531e-05,
      "loss": 0.0573,
      "step": 1730
    },
    {
      "epoch": 3.135196034249662,
      "grad_norm": 47.82561492919922,
      "learning_rate": 1.8592057761732854e-05,
      "loss": 0.0842,
      "step": 1740
    },
    {
      "epoch": 3.1532221721496168,
      "grad_norm": 35.642642974853516,
      "learning_rate": 1.84115523465704e-05,
      "loss": 0.0745,
      "step": 1750
    },
    {
      "epoch": 3.171248310049572,
      "grad_norm": 27.68004608154297,
      "learning_rate": 1.8231046931407943e-05,
      "loss": 0.0743,
      "step": 1760
    },
    {
      "epoch": 3.189274447949527,
      "grad_norm": 16.700593948364258,
      "learning_rate": 1.805054151624549e-05,
      "loss": 0.061,
      "step": 1770
    },
    {
      "epoch": 3.207300585849482,
      "grad_norm": 8.846168518066406,
      "learning_rate": 1.7870036101083033e-05,
      "loss": 0.0581,
      "step": 1780
    },
    {
      "epoch": 3.2253267237494367,
      "grad_norm": 38.31391143798828,
      "learning_rate": 1.768953068592058e-05,
      "loss": 0.1093,
      "step": 1790
    },
    {
      "epoch": 3.2433528616493916,
      "grad_norm": 39.713565826416016,
      "learning_rate": 1.7509025270758123e-05,
      "loss": 0.0859,
      "step": 1800
    },
    {
      "epoch": 3.2613789995493465,
      "grad_norm": 51.820274353027344,
      "learning_rate": 1.7328519855595668e-05,
      "loss": 0.0768,
      "step": 1810
    },
    {
      "epoch": 3.2794051374493014,
      "grad_norm": 64.148681640625,
      "learning_rate": 1.7148014440433213e-05,
      "loss": 0.0688,
      "step": 1820
    },
    {
      "epoch": 3.2974312753492563,
      "grad_norm": 42.258243560791016,
      "learning_rate": 1.6967509025270758e-05,
      "loss": 0.0787,
      "step": 1830
    },
    {
      "epoch": 3.315457413249211,
      "grad_norm": 7.588295936584473,
      "learning_rate": 1.6787003610108303e-05,
      "loss": 0.0516,
      "step": 1840
    },
    {
      "epoch": 3.3334835511491665,
      "grad_norm": 61.41086196899414,
      "learning_rate": 1.6606498194945848e-05,
      "loss": 0.0759,
      "step": 1850
    },
    {
      "epoch": 3.3515096890491214,
      "grad_norm": 2.3156187534332275,
      "learning_rate": 1.6425992779783393e-05,
      "loss": 0.0649,
      "step": 1860
    },
    {
      "epoch": 3.3695358269490763,
      "grad_norm": 37.482566833496094,
      "learning_rate": 1.624548736462094e-05,
      "loss": 0.0882,
      "step": 1870
    },
    {
      "epoch": 3.387561964849031,
      "grad_norm": 73.03086853027344,
      "learning_rate": 1.6064981949458486e-05,
      "loss": 0.0588,
      "step": 1880
    },
    {
      "epoch": 3.405588102748986,
      "grad_norm": 66.03443908691406,
      "learning_rate": 1.588447653429603e-05,
      "loss": 0.0786,
      "step": 1890
    },
    {
      "epoch": 3.423614240648941,
      "grad_norm": 85.51991271972656,
      "learning_rate": 1.5703971119133576e-05,
      "loss": 0.1056,
      "step": 1900
    },
    {
      "epoch": 3.441640378548896,
      "grad_norm": 146.41830444335938,
      "learning_rate": 1.552346570397112e-05,
      "loss": 0.0603,
      "step": 1910
    },
    {
      "epoch": 3.4596665164488507,
      "grad_norm": 10.789298057556152,
      "learning_rate": 1.5342960288808663e-05,
      "loss": 0.081,
      "step": 1920
    },
    {
      "epoch": 3.4776926543488056,
      "grad_norm": 55.54345703125,
      "learning_rate": 1.516245487364621e-05,
      "loss": 0.0884,
      "step": 1930
    },
    {
      "epoch": 3.495718792248761,
      "grad_norm": 11.078765869140625,
      "learning_rate": 1.4981949458483754e-05,
      "loss": 0.0609,
      "step": 1940
    },
    {
      "epoch": 3.5137449301487154,
      "grad_norm": 67.22940063476562,
      "learning_rate": 1.4801444043321299e-05,
      "loss": 0.0923,
      "step": 1950
    },
    {
      "epoch": 3.5317710680486707,
      "grad_norm": 21.463754653930664,
      "learning_rate": 1.4620938628158846e-05,
      "loss": 0.0784,
      "step": 1960
    },
    {
      "epoch": 3.5497972059486256,
      "grad_norm": 46.619197845458984,
      "learning_rate": 1.444043321299639e-05,
      "loss": 0.0557,
      "step": 1970
    },
    {
      "epoch": 3.5678233438485805,
      "grad_norm": 38.9202766418457,
      "learning_rate": 1.4259927797833936e-05,
      "loss": 0.0773,
      "step": 1980
    },
    {
      "epoch": 3.5858494817485354,
      "grad_norm": 75.98242950439453,
      "learning_rate": 1.407942238267148e-05,
      "loss": 0.0704,
      "step": 1990
    },
    {
      "epoch": 3.6038756196484902,
      "grad_norm": 21.9030818939209,
      "learning_rate": 1.3898916967509026e-05,
      "loss": 0.0917,
      "step": 2000
    },
    {
      "epoch": 3.621901757548445,
      "grad_norm": 39.96767807006836,
      "learning_rate": 1.3718411552346572e-05,
      "loss": 0.0596,
      "step": 2010
    },
    {
      "epoch": 3.6399278954484,
      "grad_norm": 34.662776947021484,
      "learning_rate": 1.3537906137184117e-05,
      "loss": 0.0943,
      "step": 2020
    },
    {
      "epoch": 3.6579540333483553,
      "grad_norm": 83.10199737548828,
      "learning_rate": 1.3357400722021662e-05,
      "loss": 0.0836,
      "step": 2030
    },
    {
      "epoch": 3.67598017124831,
      "grad_norm": 43.2503662109375,
      "learning_rate": 1.3176895306859207e-05,
      "loss": 0.0706,
      "step": 2040
    },
    {
      "epoch": 3.694006309148265,
      "grad_norm": 29.68882942199707,
      "learning_rate": 1.299638989169675e-05,
      "loss": 0.0795,
      "step": 2050
    },
    {
      "epoch": 3.71203244704822,
      "grad_norm": 29.66282844543457,
      "learning_rate": 1.2815884476534295e-05,
      "loss": 0.0581,
      "step": 2060
    },
    {
      "epoch": 3.730058584948175,
      "grad_norm": 44.41033935546875,
      "learning_rate": 1.263537906137184e-05,
      "loss": 0.0801,
      "step": 2070
    },
    {
      "epoch": 3.7480847228481298,
      "grad_norm": 36.88494110107422,
      "learning_rate": 1.2454873646209387e-05,
      "loss": 0.0815,
      "step": 2080
    },
    {
      "epoch": 3.7661108607480847,
      "grad_norm": 49.79912185668945,
      "learning_rate": 1.2274368231046932e-05,
      "loss": 0.0758,
      "step": 2090
    },
    {
      "epoch": 3.7841369986480395,
      "grad_norm": 44.74293899536133,
      "learning_rate": 1.2093862815884477e-05,
      "loss": 0.0701,
      "step": 2100
    },
    {
      "epoch": 3.8021631365479944,
      "grad_norm": 19.233646392822266,
      "learning_rate": 1.1913357400722022e-05,
      "loss": 0.0836,
      "step": 2110
    },
    {
      "epoch": 3.8201892744479498,
      "grad_norm": 74.74971771240234,
      "learning_rate": 1.1732851985559568e-05,
      "loss": 0.1071,
      "step": 2120
    },
    {
      "epoch": 3.838215412347904,
      "grad_norm": 65.20303344726562,
      "learning_rate": 1.1552346570397113e-05,
      "loss": 0.0761,
      "step": 2130
    },
    {
      "epoch": 3.8562415502478595,
      "grad_norm": 67.05883026123047,
      "learning_rate": 1.1371841155234658e-05,
      "loss": 0.1228,
      "step": 2140
    },
    {
      "epoch": 3.8742676881478144,
      "grad_norm": 21.299774169921875,
      "learning_rate": 1.1191335740072201e-05,
      "loss": 0.1194,
      "step": 2150
    },
    {
      "epoch": 3.8922938260477693,
      "grad_norm": 12.536727905273438,
      "learning_rate": 1.1010830324909748e-05,
      "loss": 0.0878,
      "step": 2160
    },
    {
      "epoch": 3.910319963947724,
      "grad_norm": 75.28766632080078,
      "learning_rate": 1.0830324909747293e-05,
      "loss": 0.0555,
      "step": 2170
    },
    {
      "epoch": 3.928346101847679,
      "grad_norm": 30.614364624023438,
      "learning_rate": 1.0649819494584838e-05,
      "loss": 0.0766,
      "step": 2180
    },
    {
      "epoch": 3.946372239747634,
      "grad_norm": 62.048099517822266,
      "learning_rate": 1.0469314079422383e-05,
      "loss": 0.0459,
      "step": 2190
    },
    {
      "epoch": 3.964398377647589,
      "grad_norm": 5.321977615356445,
      "learning_rate": 1.028880866425993e-05,
      "loss": 0.0751,
      "step": 2200
    },
    {
      "epoch": 3.982424515547544,
      "grad_norm": 109.93370819091797,
      "learning_rate": 1.0108303249097473e-05,
      "loss": 0.0572,
      "step": 2210
    },
    {
      "epoch": 4.0,
      "grad_norm": 51.198822021484375,
      "learning_rate": 9.927797833935018e-06,
      "loss": 0.0649,
      "step": 2220
    },
    {
      "epoch": 4.0,
      "eval_f1": 0.8250753985351141,
      "eval_fn": 372,
      "eval_fp": 440,
      "eval_loss": 0.7689136266708374,
      "eval_precision": 0.8131634819532909,
      "eval_recall": 0.8373414954088325,
      "eval_runtime": 17.851,
      "eval_samples_per_second": 261.777,
      "eval_steps_per_second": 32.771,
      "eval_tn": 1946,
      "eval_tp": 1915,
      "step": 2220
    },
    {
      "epoch": 4.018026137899955,
      "grad_norm": 59.48260498046875,
      "learning_rate": 9.747292418772563e-06,
      "loss": 0.0571,
      "step": 2230
    },
    {
      "epoch": 4.03605227579991,
      "grad_norm": 48.74031448364258,
      "learning_rate": 9.56678700361011e-06,
      "loss": 0.0425,
      "step": 2240
    },
    {
      "epoch": 4.054078413699865,
      "grad_norm": 29.84206771850586,
      "learning_rate": 9.386281588447654e-06,
      "loss": 0.045,
      "step": 2250
    },
    {
      "epoch": 4.0721045515998195,
      "grad_norm": 12.593896865844727,
      "learning_rate": 9.2057761732852e-06,
      "loss": 0.0484,
      "step": 2260
    },
    {
      "epoch": 4.090130689499775,
      "grad_norm": 43.5374870300293,
      "learning_rate": 9.025270758122744e-06,
      "loss": 0.0366,
      "step": 2270
    },
    {
      "epoch": 4.108156827399729,
      "grad_norm": 7.72160530090332,
      "learning_rate": 8.84476534296029e-06,
      "loss": 0.0601,
      "step": 2280
    },
    {
      "epoch": 4.126182965299685,
      "grad_norm": 53.89113998413086,
      "learning_rate": 8.664259927797834e-06,
      "loss": 0.0399,
      "step": 2290
    },
    {
      "epoch": 4.144209103199639,
      "grad_norm": 60.404144287109375,
      "learning_rate": 8.483754512635379e-06,
      "loss": 0.0299,
      "step": 2300
    },
    {
      "epoch": 4.162235241099594,
      "grad_norm": 56.35673522949219,
      "learning_rate": 8.303249097472924e-06,
      "loss": 0.0371,
      "step": 2310
    },
    {
      "epoch": 4.18026137899955,
      "grad_norm": 34.7463493347168,
      "learning_rate": 8.12274368231047e-06,
      "loss": 0.0331,
      "step": 2320
    },
    {
      "epoch": 4.198287516899504,
      "grad_norm": 3.510976552963257,
      "learning_rate": 7.942238267148016e-06,
      "loss": 0.0502,
      "step": 2330
    },
    {
      "epoch": 4.2163136547994595,
      "grad_norm": 1.4661507606506348,
      "learning_rate": 7.76173285198556e-06,
      "loss": 0.0219,
      "step": 2340
    },
    {
      "epoch": 4.234339792699414,
      "grad_norm": 37.3101921081543,
      "learning_rate": 7.581227436823105e-06,
      "loss": 0.0466,
      "step": 2350
    },
    {
      "epoch": 4.252365930599369,
      "grad_norm": 34.598289489746094,
      "learning_rate": 7.4007220216606496e-06,
      "loss": 0.0301,
      "step": 2360
    },
    {
      "epoch": 4.270392068499324,
      "grad_norm": 5.949997425079346,
      "learning_rate": 7.220216606498195e-06,
      "loss": 0.0201,
      "step": 2370
    },
    {
      "epoch": 4.288418206399279,
      "grad_norm": 16.611650466918945,
      "learning_rate": 7.03971119133574e-06,
      "loss": 0.0525,
      "step": 2380
    },
    {
      "epoch": 4.3064443442992335,
      "grad_norm": 140.83535766601562,
      "learning_rate": 6.859205776173286e-06,
      "loss": 0.0593,
      "step": 2390
    },
    {
      "epoch": 4.324470482199189,
      "grad_norm": 16.425495147705078,
      "learning_rate": 6.678700361010831e-06,
      "loss": 0.0436,
      "step": 2400
    },
    {
      "epoch": 4.342496620099144,
      "grad_norm": 0.8764305710792542,
      "learning_rate": 6.498194945848375e-06,
      "loss": 0.063,
      "step": 2410
    },
    {
      "epoch": 4.360522757999099,
      "grad_norm": 18.133955001831055,
      "learning_rate": 6.31768953068592e-06,
      "loss": 0.0398,
      "step": 2420
    },
    {
      "epoch": 4.378548895899054,
      "grad_norm": 35.4921989440918,
      "learning_rate": 6.137184115523466e-06,
      "loss": 0.0403,
      "step": 2430
    },
    {
      "epoch": 4.396575033799008,
      "grad_norm": 73.876220703125,
      "learning_rate": 5.956678700361011e-06,
      "loss": 0.0849,
      "step": 2440
    },
    {
      "epoch": 4.414601171698964,
      "grad_norm": 76.24176025390625,
      "learning_rate": 5.776173285198557e-06,
      "loss": 0.0741,
      "step": 2450
    },
    {
      "epoch": 4.432627309598918,
      "grad_norm": 3.0315968990325928,
      "learning_rate": 5.595667870036101e-06,
      "loss": 0.0387,
      "step": 2460
    },
    {
      "epoch": 4.4506534474988735,
      "grad_norm": 40.14049530029297,
      "learning_rate": 5.4151624548736465e-06,
      "loss": 0.041,
      "step": 2470
    },
    {
      "epoch": 4.468679585398828,
      "grad_norm": 36.951107025146484,
      "learning_rate": 5.2346570397111915e-06,
      "loss": 0.0458,
      "step": 2480
    },
    {
      "epoch": 4.486705723298783,
      "grad_norm": 90.73778533935547,
      "learning_rate": 5.054151624548736e-06,
      "loss": 0.0484,
      "step": 2490
    },
    {
      "epoch": 4.504731861198739,
      "grad_norm": 124.33832550048828,
      "learning_rate": 4.873646209386281e-06,
      "loss": 0.0383,
      "step": 2500
    },
    {
      "epoch": 4.522757999098693,
      "grad_norm": 0.6697239875793457,
      "learning_rate": 4.693140794223827e-06,
      "loss": 0.0287,
      "step": 2510
    },
    {
      "epoch": 4.540784136998648,
      "grad_norm": 87.48033905029297,
      "learning_rate": 4.512635379061372e-06,
      "loss": 0.0521,
      "step": 2520
    },
    {
      "epoch": 4.558810274898603,
      "grad_norm": 25.96059799194336,
      "learning_rate": 4.332129963898917e-06,
      "loss": 0.0545,
      "step": 2530
    },
    {
      "epoch": 4.576836412798558,
      "grad_norm": 8.84247875213623,
      "learning_rate": 4.151624548736462e-06,
      "loss": 0.0558,
      "step": 2540
    },
    {
      "epoch": 4.594862550698513,
      "grad_norm": 11.46461296081543,
      "learning_rate": 3.971119133574008e-06,
      "loss": 0.0127,
      "step": 2550
    },
    {
      "epoch": 4.612888688598468,
      "grad_norm": 29.026018142700195,
      "learning_rate": 3.7906137184115523e-06,
      "loss": 0.0361,
      "step": 2560
    },
    {
      "epoch": 4.630914826498422,
      "grad_norm": 51.55570602416992,
      "learning_rate": 3.6101083032490977e-06,
      "loss": 0.0638,
      "step": 2570
    },
    {
      "epoch": 4.648940964398378,
      "grad_norm": 12.191635131835938,
      "learning_rate": 3.429602888086643e-06,
      "loss": 0.0628,
      "step": 2580
    },
    {
      "epoch": 4.666967102298333,
      "grad_norm": 79.45787811279297,
      "learning_rate": 3.2490974729241876e-06,
      "loss": 0.0191,
      "step": 2590
    },
    {
      "epoch": 4.6849932401982874,
      "grad_norm": 66.27655792236328,
      "learning_rate": 3.068592057761733e-06,
      "loss": 0.0272,
      "step": 2600
    },
    {
      "epoch": 4.703019378098243,
      "grad_norm": 49.525123596191406,
      "learning_rate": 2.8880866425992783e-06,
      "loss": 0.0417,
      "step": 2610
    },
    {
      "epoch": 4.721045515998197,
      "grad_norm": 64.05691528320312,
      "learning_rate": 2.7075812274368233e-06,
      "loss": 0.0516,
      "step": 2620
    },
    {
      "epoch": 4.7390716538981525,
      "grad_norm": 13.44025993347168,
      "learning_rate": 2.527075812274368e-06,
      "loss": 0.0095,
      "step": 2630
    },
    {
      "epoch": 4.757097791798107,
      "grad_norm": 75.41944122314453,
      "learning_rate": 2.3465703971119136e-06,
      "loss": 0.0185,
      "step": 2640
    },
    {
      "epoch": 4.775123929698062,
      "grad_norm": 17.435264587402344,
      "learning_rate": 2.1660649819494585e-06,
      "loss": 0.051,
      "step": 2650
    },
    {
      "epoch": 4.793150067598017,
      "grad_norm": 41.04684066772461,
      "learning_rate": 1.985559566787004e-06,
      "loss": 0.0273,
      "step": 2660
    },
    {
      "epoch": 4.811176205497972,
      "grad_norm": 63.74116897583008,
      "learning_rate": 1.8050541516245488e-06,
      "loss": 0.0084,
      "step": 2670
    },
    {
      "epoch": 4.829202343397927,
      "grad_norm": 1.8474096059799194,
      "learning_rate": 1.6245487364620938e-06,
      "loss": 0.0124,
      "step": 2680
    },
    {
      "epoch": 4.847228481297882,
      "grad_norm": 44.675045013427734,
      "learning_rate": 1.4440433212996392e-06,
      "loss": 0.0266,
      "step": 2690
    },
    {
      "epoch": 4.865254619197837,
      "grad_norm": 35.31599426269531,
      "learning_rate": 1.263537906137184e-06,
      "loss": 0.0604,
      "step": 2700
    },
    {
      "epoch": 4.883280757097792,
      "grad_norm": 45.294647216796875,
      "learning_rate": 1.0830324909747293e-06,
      "loss": 0.029,
      "step": 2710
    },
    {
      "epoch": 4.901306894997747,
      "grad_norm": 28.098051071166992,
      "learning_rate": 9.025270758122744e-07,
      "loss": 0.0631,
      "step": 2720
    },
    {
      "epoch": 4.919333032897701,
      "grad_norm": 88.63848876953125,
      "learning_rate": 7.220216606498196e-07,
      "loss": 0.0484,
      "step": 2730
    },
    {
      "epoch": 4.937359170797657,
      "grad_norm": 12.318571090698242,
      "learning_rate": 5.415162454873646e-07,
      "loss": 0.027,
      "step": 2740
    },
    {
      "epoch": 4.955385308697611,
      "grad_norm": 6.036357879638672,
      "learning_rate": 3.610108303249098e-07,
      "loss": 0.0268,
      "step": 2750
    },
    {
      "epoch": 4.9734114465975665,
      "grad_norm": 59.02548599243164,
      "learning_rate": 1.805054151624549e-07,
      "loss": 0.0485,
      "step": 2760
    },
    {
      "epoch": 4.991437584497522,
      "grad_norm": 1.434023380279541,
      "learning_rate": 0.0,
      "loss": 0.0374,
      "step": 2770
    },
    {
      "epoch": 4.991437584497522,
      "eval_f1": 0.8218366483283961,
      "eval_fn": 345,
      "eval_fp": 497,
      "eval_loss": 1.0458904504776,
      "eval_precision": 0.7962279622796228,
      "eval_recall": 0.8491473546130301,
      "eval_runtime": 14.08,
      "eval_samples_per_second": 331.89,
      "eval_steps_per_second": 41.548,
      "eval_tn": 1889,
      "eval_tp": 1942,
      "step": 2770
    }
  ],
  "logging_steps": 10,
  "max_steps": 2770,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.331374439333888e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}