{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 100,
  "global_step": 1396,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.014326647564469915,
      "grad_norm": 219.6043368919095,
      "learning_rate": 9.9998935058887e-05,
      "loss": 3.6922,
      "step": 10
    },
    {
      "epoch": 0.02865329512893983,
      "grad_norm": 164.46657899747223,
      "learning_rate": 9.99820692232566e-05,
      "loss": 5.6597,
      "step": 20
    },
    {
      "epoch": 0.04297994269340974,
      "grad_norm": 6.412857819851782,
      "learning_rate": 9.993928004395286e-05,
      "loss": 2.5057,
      "step": 30
    },
    {
      "epoch": 0.05730659025787966,
      "grad_norm": 69.62252521072725,
      "learning_rate": 9.987119700662767e-05,
      "loss": 1.984,
      "step": 40
    },
    {
      "epoch": 0.07163323782234957,
      "grad_norm": 2.911386364058055,
      "learning_rate": 9.977785459481133e-05,
      "loss": 1.9796,
      "step": 50
    },
    {
      "epoch": 0.08595988538681948,
      "grad_norm": 2.928138422380621,
      "learning_rate": 9.965930008571152e-05,
      "loss": 1.9357,
      "step": 60
    },
    {
      "epoch": 0.10028653295128939,
      "grad_norm": 2.718649753716308,
      "learning_rate": 9.951559352626757e-05,
      "loss": 1.9499,
      "step": 70
    },
    {
      "epoch": 0.11461318051575932,
      "grad_norm": 3.2224244565131954,
      "learning_rate": 9.934680770273733e-05,
      "loss": 1.9316,
      "step": 80
    },
    {
      "epoch": 0.12893982808022922,
      "grad_norm": 2.35688226018453,
      "learning_rate": 9.915302810383132e-05,
      "loss": 1.9162,
      "step": 90
    },
    {
      "epoch": 0.14326647564469913,
      "grad_norm": 2.332818455987922,
      "learning_rate": 9.893435287741343e-05,
      "loss": 1.9155,
      "step": 100
    },
    {
      "epoch": 0.14326647564469913,
      "eval_loss": 1.8934402465820312,
      "eval_runtime": 53.9021,
      "eval_samples_per_second": 184.112,
      "eval_steps_per_second": 1.447,
      "step": 100
    },
    {
      "epoch": 0.15759312320916904,
      "grad_norm": 2.188522515670293,
      "learning_rate": 9.869089278078987e-05,
      "loss": 1.8955,
      "step": 110
    },
    {
      "epoch": 0.17191977077363896,
      "grad_norm": 2.0681470915932927,
      "learning_rate": 9.842277112461131e-05,
      "loss": 1.8796,
      "step": 120
    },
    {
      "epoch": 0.18624641833810887,
      "grad_norm": 2.0222340151069544,
      "learning_rate": 9.813012371041714e-05,
      "loss": 1.8806,
      "step": 130
    },
    {
      "epoch": 0.20057306590257878,
      "grad_norm": 2.192495513300194,
      "learning_rate": 9.781309876185291e-05,
      "loss": 1.8566,
      "step": 140
    },
    {
      "epoch": 0.2148997134670487,
      "grad_norm": 2.17884511538925,
      "learning_rate": 9.747185684959626e-05,
      "loss": 1.8784,
      "step": 150
    },
    {
      "epoch": 0.22922636103151864,
      "grad_norm": 2.2802921311633972,
      "learning_rate": 9.710657081002914e-05,
      "loss": 1.8404,
      "step": 160
    },
    {
      "epoch": 0.24355300859598855,
      "grad_norm": 2.12926899758067,
      "learning_rate": 9.671742565769749e-05,
      "loss": 1.8486,
      "step": 170
    },
    {
      "epoch": 0.25787965616045844,
      "grad_norm": 1.9971883700915698,
      "learning_rate": 9.630461849160281e-05,
      "loss": 1.8228,
      "step": 180
    },
    {
      "epoch": 0.2722063037249284,
      "grad_norm": 2.3808879139034613,
      "learning_rate": 9.586835839537311e-05,
      "loss": 1.8419,
      "step": 190
    },
    {
      "epoch": 0.28653295128939826,
      "grad_norm": 2.1250632793004702,
      "learning_rate": 9.540886633136352e-05,
      "loss": 1.7826,
      "step": 200
    },
    {
      "epoch": 0.28653295128939826,
      "eval_loss": 1.7807523012161255,
      "eval_runtime": 53.6847,
      "eval_samples_per_second": 184.857,
      "eval_steps_per_second": 1.453,
      "step": 200
    },
    {
      "epoch": 0.3008595988538682,
      "grad_norm": 2.074489802943034,
      "learning_rate": 9.492637502874073e-05,
      "loss": 1.7436,
      "step": 210
    },
    {
      "epoch": 0.3151862464183381,
      "grad_norm": 1.8033562678630226,
      "learning_rate": 9.44211288656074e-05,
      "loss": 1.7747,
      "step": 220
    },
    {
      "epoch": 0.32951289398280803,
      "grad_norm": 1.7937171543469097,
      "learning_rate": 9.389338374522656e-05,
      "loss": 1.8156,
      "step": 230
    },
    {
      "epoch": 0.3438395415472779,
      "grad_norm": 1.7554977702779284,
      "learning_rate": 9.334340696640855e-05,
      "loss": 1.767,
      "step": 240
    },
    {
      "epoch": 0.35816618911174786,
      "grad_norm": 2.2371815357457208,
      "learning_rate": 9.277147708812635e-05,
      "loss": 1.7629,
      "step": 250
    },
    {
      "epoch": 0.37249283667621774,
      "grad_norm": 2.0086148480908297,
      "learning_rate": 9.217788378842749e-05,
      "loss": 1.7388,
      "step": 260
    },
    {
      "epoch": 0.3868194842406877,
      "grad_norm": 1.6807840994252192,
      "learning_rate": 9.156292771771447e-05,
      "loss": 1.7428,
      "step": 270
    },
    {
      "epoch": 0.40114613180515757,
      "grad_norm": 1.7341861848893365,
      "learning_rate": 9.09269203464676e-05,
      "loss": 1.7206,
      "step": 280
    },
    {
      "epoch": 0.4154727793696275,
      "grad_norm": 1.8322321070759275,
      "learning_rate": 9.027018380748766e-05,
      "loss": 1.6931,
      "step": 290
    },
    {
      "epoch": 0.4297994269340974,
      "grad_norm": 1.615978489405473,
      "learning_rate": 8.959305073273809e-05,
      "loss": 1.6897,
      "step": 300
    },
    {
      "epoch": 0.4297994269340974,
      "eval_loss": 1.6719396114349365,
      "eval_runtime": 53.4447,
      "eval_samples_per_second": 185.687,
      "eval_steps_per_second": 1.459,
      "step": 300
    },
    {
      "epoch": 0.44412607449856734,
      "grad_norm": 1.8985940929478535,
      "learning_rate": 8.889586408486953e-05,
      "loss": 1.6554,
      "step": 310
    },
    {
      "epoch": 0.4584527220630373,
      "grad_norm": 1.9424706081821295,
      "learning_rate": 8.817897698351185e-05,
      "loss": 1.6655,
      "step": 320
    },
    {
      "epoch": 0.47277936962750716,
      "grad_norm": 1.9287612597705355,
      "learning_rate": 8.744275252642184e-05,
      "loss": 1.638,
      "step": 330
    },
    {
      "epoch": 0.4871060171919771,
      "grad_norm": 1.5833650317049555,
      "learning_rate": 8.668756360557697e-05,
      "loss": 1.6386,
      "step": 340
    },
    {
      "epoch": 0.501432664756447,
      "grad_norm": 1.9070708361736106,
      "learning_rate": 8.591379271830855e-05,
      "loss": 1.6365,
      "step": 350
    },
    {
      "epoch": 0.5157593123209169,
      "grad_norm": 1.7717200377491302,
      "learning_rate": 8.512183177356973e-05,
      "loss": 1.6135,
      "step": 360
    },
    {
      "epoch": 0.5300859598853869,
      "grad_norm": 1.5741330801409983,
      "learning_rate": 8.43120818934367e-05,
      "loss": 1.6001,
      "step": 370
    },
    {
      "epoch": 0.5444126074498568,
      "grad_norm": 2.216205258530314,
      "learning_rate": 8.348495320994345e-05,
      "loss": 1.6161,
      "step": 380
    },
    {
      "epoch": 0.5587392550143266,
      "grad_norm": 1.6749338500257063,
      "learning_rate": 8.264086465735312e-05,
      "loss": 1.5873,
      "step": 390
    },
    {
      "epoch": 0.5730659025787965,
      "grad_norm": 1.6669193921386205,
      "learning_rate": 8.178024375997097e-05,
      "loss": 1.5887,
      "step": 400
    },
    {
      "epoch": 0.5730659025787965,
      "eval_loss": 1.5738122463226318,
      "eval_runtime": 53.5809,
      "eval_samples_per_second": 185.215,
      "eval_steps_per_second": 1.456,
      "step": 400
    },
    {
      "epoch": 0.5873925501432665,
      "grad_norm": 1.6738558434456052,
      "learning_rate": 8.090352641560668e-05,
      "loss": 1.6297,
      "step": 410
    },
    {
      "epoch": 0.6017191977077364,
      "grad_norm": 1.6812511762700748,
      "learning_rate": 8.001115667479552e-05,
      "loss": 1.5987,
      "step": 420
    },
    {
      "epoch": 0.6160458452722063,
      "grad_norm": 1.6615769390951736,
      "learning_rate": 7.910358651589015e-05,
      "loss": 1.5509,
      "step": 430
    },
    {
      "epoch": 0.6303724928366762,
      "grad_norm": 1.696391678445336,
      "learning_rate": 7.818127561613707e-05,
      "loss": 1.5429,
      "step": 440
    },
    {
      "epoch": 0.6446991404011462,
      "grad_norm": 1.5660326039107408,
      "learning_rate": 7.724469111885371e-05,
      "loss": 1.5396,
      "step": 450
    },
    {
      "epoch": 0.6590257879656161,
      "grad_norm": 2.028710946062277,
      "learning_rate": 7.629430739682398e-05,
      "loss": 1.5266,
      "step": 460
    },
    {
      "epoch": 0.673352435530086,
      "grad_norm": 2.116807818224008,
      "learning_rate": 7.533060581203201e-05,
      "loss": 1.5824,
      "step": 470
    },
    {
      "epoch": 0.6876790830945558,
      "grad_norm": 1.637751947026498,
      "learning_rate": 7.435407447185623e-05,
      "loss": 1.4828,
      "step": 480
    },
    {
      "epoch": 0.7020057306590258,
      "grad_norm": 1.5564501538035538,
      "learning_rate": 7.336520798184664e-05,
      "loss": 1.4991,
      "step": 490
    },
    {
      "epoch": 0.7163323782234957,
      "grad_norm": 1.575375723060998,
      "learning_rate": 7.236450719521092e-05,
      "loss": 1.4628,
      "step": 500
    },
    {
      "epoch": 0.7163323782234957,
      "eval_loss": 1.4659688472747803,
      "eval_runtime": 53.8819,
      "eval_samples_per_second": 184.18,
      "eval_steps_per_second": 1.448,
      "step": 500
    },
    {
      "epoch": 0.7306590257879656,
      "grad_norm": 1.5701973244119096,
      "learning_rate": 7.135247895913623e-05,
      "loss": 1.485,
      "step": 510
    },
    {
      "epoch": 0.7449856733524355,
      "grad_norm": 1.7014160396040479,
      "learning_rate": 7.032963585807501e-05,
      "loss": 1.454,
      "step": 520
    },
    {
      "epoch": 0.7593123209169055,
      "grad_norm": 1.4812078348628686,
      "learning_rate": 6.929649595412497e-05,
      "loss": 1.4675,
      "step": 530
    },
    {
      "epoch": 0.7736389684813754,
      "grad_norm": 1.6254083780903348,
      "learning_rate": 6.825358252463461e-05,
      "loss": 1.4232,
      "step": 540
    },
    {
      "epoch": 0.7879656160458453,
      "grad_norm": 1.593996844960836,
      "learning_rate": 6.720142379716728e-05,
      "loss": 1.4084,
      "step": 550
    },
    {
      "epoch": 0.8022922636103151,
      "grad_norm": 1.4683667712701527,
      "learning_rate": 6.614055268195805e-05,
      "loss": 1.4034,
      "step": 560
    },
    {
      "epoch": 0.8166189111747851,
      "grad_norm": 1.6604556204013439,
      "learning_rate": 6.507150650199886e-05,
      "loss": 1.439,
      "step": 570
    },
    {
      "epoch": 0.830945558739255,
      "grad_norm": 1.5795992163148025,
      "learning_rate": 6.399482672088852e-05,
      "loss": 1.3767,
      "step": 580
    },
    {
      "epoch": 0.8452722063037249,
      "grad_norm": 1.7278879653526587,
      "learning_rate": 6.291105866858562e-05,
      "loss": 1.4084,
      "step": 590
    },
    {
      "epoch": 0.8595988538681948,
      "grad_norm": 1.4915922467967875,
      "learning_rate": 6.18207512652031e-05,
      "loss": 1.3751,
      "step": 600
    },
    {
      "epoch": 0.8595988538681948,
      "eval_loss": 1.3670978546142578,
      "eval_runtime": 53.4401,
      "eval_samples_per_second": 185.703,
      "eval_steps_per_second": 1.46,
      "step": 600
    },
    {
      "epoch": 0.8739255014326648,
      "grad_norm": 1.5055751912412483,
      "learning_rate": 6.0724456742984535e-05,
      "loss": 1.3799,
      "step": 610
    },
    {
      "epoch": 0.8882521489971347,
      "grad_norm": 1.5174029894001677,
      "learning_rate": 5.962273036660276e-05,
      "loss": 1.372,
      "step": 620
    },
    {
      "epoch": 0.9025787965616046,
      "grad_norm": 1.6367622819691852,
      "learning_rate": 5.851613015192261e-05,
      "loss": 1.3514,
      "step": 630
    },
    {
      "epoch": 0.9169054441260746,
      "grad_norm": 1.556482139125314,
      "learning_rate": 5.74052165833702e-05,
      "loss": 1.3283,
      "step": 640
    },
    {
      "epoch": 0.9312320916905444,
      "grad_norm": 1.6450518726375203,
      "learning_rate": 5.6290552330051946e-05,
      "loss": 1.3257,
      "step": 650
    },
    {
      "epoch": 0.9455587392550143,
      "grad_norm": 1.3993302544976038,
      "learning_rate": 5.5172701960767006e-05,
      "loss": 1.3279,
      "step": 660
    },
    {
      "epoch": 0.9598853868194842,
      "grad_norm": 1.6965193543443504,
      "learning_rate": 5.405223165805755e-05,
      "loss": 1.2695,
      "step": 670
    },
    {
      "epoch": 0.9742120343839542,
      "grad_norm": 1.4956063904398584,
      "learning_rate": 5.292970893144172e-05,
      "loss": 1.2958,
      "step": 680
    },
    {
      "epoch": 0.9885386819484241,
      "grad_norm": 1.3407355333418884,
      "learning_rate": 5.180570232997446e-05,
      "loss": 1.2763,
      "step": 690
    },
    {
      "epoch": 1.002865329512894,
      "grad_norm": 1.8406934340144963,
      "learning_rate": 5.06807811542818e-05,
      "loss": 1.1263,
      "step": 700
    },
    {
      "epoch": 1.002865329512894,
      "eval_loss": 1.2830663919448853,
      "eval_runtime": 53.5866,
      "eval_samples_per_second": 185.196,
      "eval_steps_per_second": 1.456,
      "step": 700
    },
    {
      "epoch": 1.0171919770773639,
      "grad_norm": 1.466971080200883,
      "learning_rate": 4.9555515168214514e-05,
      "loss": 0.7167,
      "step": 710
    },
    {
      "epoch": 1.0315186246418337,
      "grad_norm": 1.3865994721327555,
      "learning_rate": 4.8430474310267185e-05,
      "loss": 0.6893,
      "step": 720
    },
    {
      "epoch": 1.0458452722063036,
      "grad_norm": 1.5632728482959424,
      "learning_rate": 4.7306228404908706e-05,
      "loss": 0.6935,
      "step": 730
    },
    {
      "epoch": 1.0601719197707737,
      "grad_norm": 1.3730426755979892,
      "learning_rate": 4.6183346873970704e-05,
      "loss": 0.7053,
      "step": 740
    },
    {
      "epoch": 1.0744985673352436,
      "grad_norm": 1.469352640225314,
      "learning_rate": 4.506239844823972e-05,
      "loss": 0.6848,
      "step": 750
    },
    {
      "epoch": 1.0888252148997135,
      "grad_norm": 1.4293421992017694,
      "learning_rate": 4.394395087939947e-05,
      "loss": 0.6677,
      "step": 760
    },
    {
      "epoch": 1.1031518624641834,
      "grad_norm": 1.497199630056392,
      "learning_rate": 4.282857065246908e-05,
      "loss": 0.6844,
      "step": 770
    },
    {
      "epoch": 1.1174785100286533,
      "grad_norm": 1.4739240120513313,
      "learning_rate": 4.1716822698882826e-05,
      "loss": 0.6735,
      "step": 780
    },
    {
      "epoch": 1.1318051575931232,
      "grad_norm": 1.4505373708013058,
      "learning_rate": 4.0609270110356625e-05,
      "loss": 0.6405,
      "step": 790
    },
    {
      "epoch": 1.146131805157593,
      "grad_norm": 1.507546317576944,
      "learning_rate": 3.9506473853686586e-05,
      "loss": 0.688,
      "step": 800
    },
    {
      "epoch": 1.146131805157593,
      "eval_loss": 1.2491989135742188,
      "eval_runtime": 53.6109,
      "eval_samples_per_second": 185.112,
      "eval_steps_per_second": 1.455,
      "step": 800
    },
    {
      "epoch": 1.1604584527220632,
      "grad_norm": 1.6233022720214296,
      "learning_rate": 3.8408992486623595e-05,
      "loss": 0.662,
      "step": 810
    },
    {
      "epoch": 1.174785100286533,
      "grad_norm": 1.2645448426600083,
      "learning_rate": 3.7317381874968207e-05,
      "loss": 0.6901,
      "step": 820
    },
    {
      "epoch": 1.189111747851003,
      "grad_norm": 1.4745754772070392,
      "learning_rate": 3.6232194911028874e-05,
      "loss": 0.6705,
      "step": 830
    },
    {
      "epoch": 1.2034383954154728,
      "grad_norm": 1.3592040417066167,
      "learning_rate": 3.515398123358627e-05,
      "loss": 0.6764,
      "step": 840
    },
    {
      "epoch": 1.2177650429799427,
      "grad_norm": 1.3029544883514448,
      "learning_rate": 3.40832869495056e-05,
      "loss": 0.6761,
      "step": 850
    },
    {
      "epoch": 1.2320916905444126,
      "grad_norm": 2.098968282444413,
      "learning_rate": 3.302065435713763e-05,
      "loss": 0.6275,
      "step": 860
    },
    {
      "epoch": 1.2464183381088825,
      "grad_norm": 1.3448595848587355,
      "learning_rate": 3.196662167164877e-05,
      "loss": 0.6342,
      "step": 870
    },
    {
      "epoch": 1.2607449856733524,
      "grad_norm": 1.3928584326736022,
      "learning_rate": 3.092172275241933e-05,
      "loss": 0.6357,
      "step": 880
    },
    {
      "epoch": 1.2750716332378222,
      "grad_norm": 1.408932176758002,
      "learning_rate": 2.9886486832647835e-05,
      "loss": 0.6189,
      "step": 890
    },
    {
      "epoch": 1.2893982808022924,
      "grad_norm": 1.3715405151520115,
      "learning_rate": 2.8861438251298512e-05,
      "loss": 0.6544,
      "step": 900
    },
    {
      "epoch": 1.2893982808022924,
      "eval_loss": 1.1817691326141357,
      "eval_runtime": 53.5943,
      "eval_samples_per_second": 185.169,
      "eval_steps_per_second": 1.455,
      "step": 900
    },
    {
      "epoch": 1.3037249283667622,
      "grad_norm": 1.188564778298211,
      "learning_rate": 2.784709618752776e-05,
      "loss": 0.6359,
      "step": 910
    },
    {
      "epoch": 1.3180515759312321,
      "grad_norm": 1.2862274938371403,
      "learning_rate": 2.6843974397723753e-05,
      "loss": 0.621,
      "step": 920
    },
    {
      "epoch": 1.332378223495702,
      "grad_norm": 1.316124517086862,
      "learning_rate": 2.5852580955293003e-05,
      "loss": 0.6265,
      "step": 930
    },
    {
      "epoch": 1.346704871060172,
      "grad_norm": 1.3322773186587833,
      "learning_rate": 2.4873417993325042e-05,
      "loss": 0.6441,
      "step": 940
    },
    {
      "epoch": 1.3610315186246418,
      "grad_norm": 1.2889293368365848,
      "learning_rate": 2.3906981450265836e-05,
      "loss": 0.6173,
      "step": 950
    },
    {
      "epoch": 1.3753581661891117,
      "grad_norm": 1.4909707916630004,
      "learning_rate": 2.295376081872895e-05,
      "loss": 0.6193,
      "step": 960
    },
    {
      "epoch": 1.3896848137535818,
      "grad_norm": 1.3688980598451725,
      "learning_rate": 2.201423889757122e-05,
      "loss": 0.6466,
      "step": 970
    },
    {
      "epoch": 1.4040114613180517,
      "grad_norm": 1.3684687851700825,
      "learning_rate": 2.108889154735895e-05,
      "loss": 0.6124,
      "step": 980
    },
    {
      "epoch": 1.4183381088825215,
      "grad_norm": 1.2359362156168956,
      "learning_rate": 2.0178187449348208e-05,
      "loss": 0.6183,
      "step": 990
    },
    {
      "epoch": 1.4326647564469914,
      "grad_norm": 1.2595878525492465,
      "learning_rate": 1.9282587868101308e-05,
      "loss": 0.6017,
      "step": 1000
    },
    {
      "epoch": 1.4326647564469914,
      "eval_loss": 1.120739459991455,
      "eval_runtime": 53.3734,
      "eval_samples_per_second": 185.935,
      "eval_steps_per_second": 1.461,
      "step": 1000
    },
    {
      "epoch": 1.4469914040114613,
      "grad_norm": 1.384961270564966,
      "learning_rate": 1.840254641786006e-05,
      "loss": 0.5614,
      "step": 1010
    },
    {
      "epoch": 1.4613180515759312,
      "grad_norm": 1.4232024294510734,
      "learning_rate": 1.7538508832793578e-05,
      "loss": 0.5797,
      "step": 1020
    },
    {
      "epoch": 1.475644699140401,
      "grad_norm": 1.1618822443190342,
      "learning_rate": 1.669091274123732e-05,
      "loss": 0.6184,
      "step": 1030
    },
    {
      "epoch": 1.4899713467048712,
      "grad_norm": 1.3507488640615957,
      "learning_rate": 1.586018744403787e-05,
      "loss": 0.5813,
      "step": 1040
    },
    {
      "epoch": 1.5042979942693409,
      "grad_norm": 1.3443442941285704,
      "learning_rate": 1.504675369711529e-05,
      "loss": 0.5667,
      "step": 1050
    },
    {
      "epoch": 1.518624641833811,
      "grad_norm": 1.2877577958996937,
      "learning_rate": 1.4251023498353533e-05,
      "loss": 0.582,
      "step": 1060
    },
    {
      "epoch": 1.5329512893982808,
      "grad_norm": 1.1575931882275956,
      "learning_rate": 1.3473399878926746e-05,
      "loss": 0.563,
      "step": 1070
    },
    {
      "epoch": 1.5472779369627507,
      "grad_norm": 1.1621133893036653,
      "learning_rate": 1.2714276699166994e-05,
      "loss": 0.5466,
      "step": 1080
    },
    {
      "epoch": 1.5616045845272206,
      "grad_norm": 1.228237925890841,
      "learning_rate": 1.1974038449077258e-05,
      "loss": 0.5239,
      "step": 1090
    },
    {
      "epoch": 1.5759312320916905,
      "grad_norm": 1.1901152329499263,
      "learning_rate": 1.1253060053589997e-05,
      "loss": 0.5763,
      "step": 1100
    },
    {
      "epoch": 1.5759312320916905,
      "eval_loss": 1.0708156824111938,
      "eval_runtime": 53.5345,
      "eval_samples_per_second": 185.376,
      "eval_steps_per_second": 1.457,
      "step": 1100
    },
    {
      "epoch": 1.5902578796561606,
      "grad_norm": 1.321031783530892,
      "learning_rate": 1.0551706682670748e-05,
      "loss": 0.5511,
      "step": 1110
    },
    {
      "epoch": 1.6045845272206303,
      "grad_norm": 1.263841226648576,
      "learning_rate": 9.870333566362322e-06,
      "loss": 0.5645,
      "step": 1120
    },
    {
      "epoch": 1.6189111747851004,
      "grad_norm": 1.2601683852009655,
      "learning_rate": 9.209285814863477e-06,
      "loss": 0.5301,
      "step": 1130
    },
    {
      "epoch": 1.63323782234957,
      "grad_norm": 1.1853670920081087,
      "learning_rate": 8.568898243733386e-06,
      "loss": 0.5371,
      "step": 1140
    },
    {
      "epoch": 1.6475644699140402,
      "grad_norm": 1.2370950402879501,
      "learning_rate": 7.949495204310104e-06,
      "loss": 0.5555,
      "step": 1150
    },
    {
      "epoch": 1.66189111747851,
      "grad_norm": 1.3089349313138638,
      "learning_rate": 7.351390419429083e-06,
      "loss": 0.5616,
      "step": 1160
    },
    {
      "epoch": 1.67621776504298,
      "grad_norm": 1.3007158953895024,
      "learning_rate": 6.774886824525073e-06,
      "loss": 0.539,
      "step": 1170
    },
    {
      "epoch": 1.6905444126074498,
      "grad_norm": 1.2215575024934626,
      "learning_rate": 6.2202764141976346e-06,
      "loss": 0.5366,
      "step": 1180
    },
    {
      "epoch": 1.7048710601719197,
      "grad_norm": 1.1917522774279614,
      "learning_rate": 5.687840094318125e-06,
      "loss": 0.5072,
      "step": 1190
    },
    {
      "epoch": 1.7191977077363898,
      "grad_norm": 1.2065112378383362,
      "learning_rate": 5.177847539753084e-06,
      "loss": 0.5599,
      "step": 1200
    },
    {
      "epoch": 1.7191977077363898,
      "eval_loss": 1.036474347114563,
      "eval_runtime": 53.4293,
      "eval_samples_per_second": 185.741,
      "eval_steps_per_second": 1.46,
      "step": 1200
    },
    {
      "epoch": 1.7335243553008595,
      "grad_norm": 1.240311618648056,
      "learning_rate": 4.690557057775974e-06,
      "loss": 0.5367,
      "step": 1210
    },
    {
      "epoch": 1.7478510028653296,
      "grad_norm": 1.2440347942531507,
      "learning_rate": 4.226215457236565e-06,
      "loss": 0.5388,
      "step": 1220
    },
    {
      "epoch": 1.7621776504297995,
      "grad_norm": 1.2299956356695219,
      "learning_rate": 3.785057923554197e-06,
      "loss": 0.5238,
      "step": 1230
    },
    {
      "epoch": 1.7765042979942693,
      "grad_norm": 1.3887050995508965,
      "learning_rate": 3.36730789959811e-06,
      "loss": 0.4981,
      "step": 1240
    },
    {
      "epoch": 1.7908309455587392,
      "grad_norm": 1.1031870914219795,
      "learning_rate": 2.9731769725154514e-06,
      "loss": 0.5294,
      "step": 1250
    },
    {
      "epoch": 1.8051575931232091,
      "grad_norm": 1.4024610914172573,
      "learning_rate": 2.6028647665639672e-06,
      "loss": 0.5275,
      "step": 1260
    },
    {
      "epoch": 1.8194842406876792,
      "grad_norm": 1.233183532537092,
      "learning_rate": 2.256558842003892e-06,
      "loss": 0.5253,
      "step": 1270
    },
    {
      "epoch": 1.8338108882521489,
      "grad_norm": 1.2308977898401474,
      "learning_rate": 1.9344346001001377e-06,
      "loss": 0.5442,
      "step": 1280
    },
    {
      "epoch": 1.848137535816619,
      "grad_norm": 1.1860759958409324,
      "learning_rate": 1.6366551942828711e-06,
      "loss": 0.5483,
      "step": 1290
    },
    {
      "epoch": 1.8624641833810889,
      "grad_norm": 1.3493855244370383,
      "learning_rate": 1.363371447511619e-06,
      "loss": 0.5101,
      "step": 1300
    },
    {
      "epoch": 1.8624641833810889,
      "eval_loss": 1.0169659852981567,
      "eval_runtime": 53.5386,
      "eval_samples_per_second": 185.361,
      "eval_steps_per_second": 1.457,
      "step": 1300
    },
    {
      "epoch": 1.8767908309455588,
      "grad_norm": 1.3172256182801068,
      "learning_rate": 1.1147217758845751e-06,
      "loss": 0.5469,
      "step": 1310
    },
    {
      "epoch": 1.8911174785100286,
      "grad_norm": 1.2977180424743335,
      "learning_rate": 8.908321185319312e-07,
      "loss": 0.5319,
      "step": 1320
    },
    {
      "epoch": 1.9054441260744985,
      "grad_norm": 1.1849694099091805,
      "learning_rate": 6.918158738286884e-07,
      "loss": 0.5126,
      "step": 1330
    },
    {
      "epoch": 1.9197707736389686,
      "grad_norm": 1.2190235185481941,
      "learning_rate": 5.177738419592515e-07,
      "loss": 0.5276,
      "step": 1340
    },
    {
      "epoch": 1.9340974212034383,
      "grad_norm": 1.1093762195623071,
      "learning_rate": 3.687941738629186e-07,
      "loss": 0.53,
      "step": 1350
    },
    {
      "epoch": 1.9484240687679084,
      "grad_norm": 1.0282807771775446,
      "learning_rate": 2.449523265861176e-07,
      "loss": 0.4903,
      "step": 1360
    },
    {
      "epoch": 1.962750716332378,
      "grad_norm": 1.3409900754803958,
      "learning_rate": 1.4631102506399985e-07,
      "loss": 0.5025,
      "step": 1370
    },
    {
      "epoch": 1.9770773638968482,
      "grad_norm": 1.2011587981462408,
      "learning_rate": 7.292023035074813e-08,
      "loss": 0.5428,
      "step": 1380
    },
    {
      "epoch": 1.991404011461318,
      "grad_norm": 1.2347668278605959,
      "learning_rate": 2.4817114314662493e-08,
      "loss": 0.52,
      "step": 1390
    },
    {
      "epoch": 2.0,
      "step": 1396,
      "total_flos": 192624989306880.0,
      "train_loss": 1.1565231682577926,
      "train_runtime": 4692.4705,
      "train_samples_per_second": 38.067,
      "train_steps_per_second": 0.297
    }
  ],
  "logging_steps": 10,
  "max_steps": 1396,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 192624989306880.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}