{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.985250737463127,
  "eval_steps": 500,
  "global_step": 845,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0058997050147492625,
      "grad_norm": 1.106083567904979,
      "learning_rate": 1.1764705882352942e-07,
      "loss": 0.7816,
      "step": 1
    },
    {
      "epoch": 0.029498525073746312,
      "grad_norm": 1.1433080381542629,
      "learning_rate": 5.882352941176471e-07,
      "loss": 0.7757,
      "step": 5
    },
    {
      "epoch": 0.058997050147492625,
      "grad_norm": 0.997014214705184,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 0.7736,
      "step": 10
    },
    {
      "epoch": 0.08849557522123894,
      "grad_norm": 1.304994737635826,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 0.7594,
      "step": 15
    },
    {
      "epoch": 0.11799410029498525,
      "grad_norm": 0.5741459890364078,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.7332,
      "step": 20
    },
    {
      "epoch": 0.14749262536873156,
      "grad_norm": 0.4736057964790196,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.7054,
      "step": 25
    },
    {
      "epoch": 0.17699115044247787,
      "grad_norm": 0.28699264838960103,
      "learning_rate": 3.529411764705883e-06,
      "loss": 0.6613,
      "step": 30
    },
    {
      "epoch": 0.20648967551622419,
      "grad_norm": 1.92856903253004,
      "learning_rate": 4.11764705882353e-06,
      "loss": 0.6422,
      "step": 35
    },
    {
      "epoch": 0.2359882005899705,
      "grad_norm": 0.346137515731761,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.6165,
      "step": 40
    },
    {
      "epoch": 0.26548672566371684,
      "grad_norm": 0.17870145514430266,
      "learning_rate": 5.294117647058824e-06,
      "loss": 0.6133,
      "step": 45
    },
    {
      "epoch": 0.2949852507374631,
      "grad_norm": 0.1437606973005659,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.5986,
      "step": 50
    },
    {
      "epoch": 0.32448377581120946,
      "grad_norm": 0.2706264861398608,
      "learning_rate": 6.470588235294119e-06,
      "loss": 0.5884,
      "step": 55
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 0.6332617880988516,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.5818,
      "step": 60
    },
    {
      "epoch": 0.3834808259587021,
      "grad_norm": 0.1015116079049458,
      "learning_rate": 7.647058823529411e-06,
      "loss": 0.5798,
      "step": 65
    },
    {
      "epoch": 0.41297935103244837,
      "grad_norm": 0.12119348356809045,
      "learning_rate": 8.23529411764706e-06,
      "loss": 0.5751,
      "step": 70
    },
    {
      "epoch": 0.4424778761061947,
      "grad_norm": 0.07879965059698645,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.5708,
      "step": 75
    },
    {
      "epoch": 0.471976401179941,
      "grad_norm": 0.05646247242948782,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.5683,
      "step": 80
    },
    {
      "epoch": 0.5014749262536873,
      "grad_norm": 0.25525240715915204,
      "learning_rate": 1e-05,
      "loss": 0.5662,
      "step": 85
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 0.05365048303720814,
      "learning_rate": 9.998932083939657e-06,
      "loss": 0.5598,
      "step": 90
    },
    {
      "epoch": 0.56047197640118,
      "grad_norm": 0.1595860307142945,
      "learning_rate": 9.995728791936505e-06,
      "loss": 0.5538,
      "step": 95
    },
    {
      "epoch": 0.5899705014749262,
      "grad_norm": 0.06609453133465848,
      "learning_rate": 9.990391492329341e-06,
      "loss": 0.5529,
      "step": 100
    },
    {
      "epoch": 0.6194690265486725,
      "grad_norm": 1.6293266027560704,
      "learning_rate": 9.98292246503335e-06,
      "loss": 0.5527,
      "step": 105
    },
    {
      "epoch": 0.6489675516224189,
      "grad_norm": 1.050544643875783,
      "learning_rate": 9.973324900566214e-06,
      "loss": 0.5477,
      "step": 110
    },
    {
      "epoch": 0.6784660766961652,
      "grad_norm": 0.052879637742834044,
      "learning_rate": 9.961602898685225e-06,
      "loss": 0.5474,
      "step": 115
    },
    {
      "epoch": 0.7079646017699115,
      "grad_norm": 0.04928862663129842,
      "learning_rate": 9.947761466636014e-06,
      "loss": 0.5553,
      "step": 120
    },
    {
      "epoch": 0.7374631268436578,
      "grad_norm": 0.052753160381468045,
      "learning_rate": 9.931806517013612e-06,
      "loss": 0.5464,
      "step": 125
    },
    {
      "epoch": 0.7669616519174042,
      "grad_norm": 0.045802114899820966,
      "learning_rate": 9.913744865236798e-06,
      "loss": 0.5404,
      "step": 130
    },
    {
      "epoch": 0.7964601769911505,
      "grad_norm": 0.11489905003299361,
      "learning_rate": 9.893584226636773e-06,
      "loss": 0.5317,
      "step": 135
    },
    {
      "epoch": 0.8259587020648967,
      "grad_norm": 0.4919380291913098,
      "learning_rate": 9.871333213161438e-06,
      "loss": 0.5481,
      "step": 140
    },
    {
      "epoch": 0.855457227138643,
      "grad_norm": 0.3232334093103194,
      "learning_rate": 9.847001329696653e-06,
      "loss": 0.5339,
      "step": 145
    },
    {
      "epoch": 0.8849557522123894,
      "grad_norm": 0.05048150556921708,
      "learning_rate": 9.820598970006068e-06,
      "loss": 0.5325,
      "step": 150
    },
    {
      "epoch": 0.9144542772861357,
      "grad_norm": 0.058183934565565226,
      "learning_rate": 9.792137412291265e-06,
      "loss": 0.5373,
      "step": 155
    },
    {
      "epoch": 0.943952802359882,
      "grad_norm": 2.1026211814523927,
      "learning_rate": 9.761628814374074e-06,
      "loss": 0.535,
      "step": 160
    },
    {
      "epoch": 0.9734513274336283,
      "grad_norm": 0.05037291432057576,
      "learning_rate": 9.729086208503174e-06,
      "loss": 0.5392,
      "step": 165
    },
    {
      "epoch": 0.9970501474926253,
      "eval_loss": 0.5948278307914734,
      "eval_runtime": 7.8455,
      "eval_samples_per_second": 16.825,
      "eval_steps_per_second": 0.382,
      "step": 169
    },
    {
      "epoch": 1.0029498525073746,
      "grad_norm": 0.07970296331803481,
      "learning_rate": 9.694523495787149e-06,
      "loss": 0.5852,
      "step": 170
    },
    {
      "epoch": 1.0324483775811208,
      "grad_norm": 0.06217085534785087,
      "learning_rate": 9.657955440256396e-06,
      "loss": 0.5318,
      "step": 175
    },
    {
      "epoch": 1.0619469026548674,
      "grad_norm": 0.4533798599174413,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.5265,
      "step": 180
    },
    {
      "epoch": 1.0914454277286136,
      "grad_norm": 0.0843198746785751,
      "learning_rate": 9.578866633275289e-06,
      "loss": 0.525,
      "step": 185
    },
    {
      "epoch": 1.12094395280236,
      "grad_norm": 0.0514110786588162,
      "learning_rate": 9.536379665907801e-06,
      "loss": 0.5244,
      "step": 190
    },
    {
      "epoch": 1.1504424778761062,
      "grad_norm": 0.08421706916029806,
      "learning_rate": 9.491954909459895e-06,
      "loss": 0.5241,
      "step": 195
    },
    {
      "epoch": 1.1799410029498525,
      "grad_norm": 0.058452224546329916,
      "learning_rate": 9.445611340695926e-06,
      "loss": 0.5274,
      "step": 200
    },
    {
      "epoch": 1.2094395280235988,
      "grad_norm": 1.2314254826441438,
      "learning_rate": 9.397368756032445e-06,
      "loss": 0.5247,
      "step": 205
    },
    {
      "epoch": 1.238938053097345,
      "grad_norm": 0.04772904502073005,
      "learning_rate": 9.347247763081834e-06,
      "loss": 0.5276,
      "step": 210
    },
    {
      "epoch": 1.2684365781710913,
      "grad_norm": 0.05022705703233357,
      "learning_rate": 9.295269771849426e-06,
      "loss": 0.5179,
      "step": 215
    },
    {
      "epoch": 1.2979351032448379,
      "grad_norm": 0.2724781073186335,
      "learning_rate": 9.241456985587868e-06,
      "loss": 0.5229,
      "step": 220
    },
    {
      "epoch": 1.3274336283185841,
      "grad_norm": 0.07955839264628826,
      "learning_rate": 9.185832391312644e-06,
      "loss": 0.5222,
      "step": 225
    },
    {
      "epoch": 1.3569321533923304,
      "grad_norm": 0.047043054214154625,
      "learning_rate": 9.12841974998278e-06,
      "loss": 0.5195,
      "step": 230
    },
    {
      "epoch": 1.3864306784660767,
      "grad_norm": 0.045497809508961325,
      "learning_rate": 9.069243586350976e-06,
      "loss": 0.5148,
      "step": 235
    },
    {
      "epoch": 1.415929203539823,
      "grad_norm": 0.8566869256944054,
      "learning_rate": 9.008329178487442e-06,
      "loss": 0.5207,
      "step": 240
    },
    {
      "epoch": 1.4454277286135693,
      "grad_norm": 0.06712694488880502,
      "learning_rate": 8.94570254698197e-06,
      "loss": 0.5219,
      "step": 245
    },
    {
      "epoch": 1.4749262536873156,
      "grad_norm": 0.134636628527114,
      "learning_rate": 8.881390443828788e-06,
      "loss": 0.5164,
      "step": 250
    },
    {
      "epoch": 1.504424778761062,
      "grad_norm": 1.017642297039446,
      "learning_rate": 8.815420340999034e-06,
      "loss": 0.5164,
      "step": 255
    },
    {
      "epoch": 1.5339233038348081,
      "grad_norm": 0.8748816419357938,
      "learning_rate": 8.747820418705632e-06,
      "loss": 0.519,
      "step": 260
    },
    {
      "epoch": 1.5634218289085546,
      "grad_norm": 0.05916211446645424,
      "learning_rate": 8.67861955336566e-06,
      "loss": 0.5132,
      "step": 265
    },
    {
      "epoch": 1.592920353982301,
      "grad_norm": 0.06267760570910012,
      "learning_rate": 8.607847305265312e-06,
      "loss": 0.5119,
      "step": 270
    },
    {
      "epoch": 1.6224188790560472,
      "grad_norm": 0.053722149220927,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.514,
      "step": 275
    },
    {
      "epoch": 1.6519174041297935,
      "grad_norm": 0.055409146579795114,
      "learning_rate": 8.461710245224149e-06,
      "loss": 0.5231,
      "step": 280
    },
    {
      "epoch": 1.6814159292035398,
      "grad_norm": 0.049718992850954775,
      "learning_rate": 8.386407858128707e-06,
      "loss": 0.5127,
      "step": 285
    },
    {
      "epoch": 1.7109144542772863,
      "grad_norm": 0.0443810313683345,
      "learning_rate": 8.309658911297833e-06,
      "loss": 0.5116,
      "step": 290
    },
    {
      "epoch": 1.7404129793510323,
      "grad_norm": 0.05127452771633588,
      "learning_rate": 8.231496189304704e-06,
      "loss": 0.5078,
      "step": 295
    },
    {
      "epoch": 1.7699115044247788,
      "grad_norm": 0.04591077767546143,
      "learning_rate": 8.151953080639777e-06,
      "loss": 0.5125,
      "step": 300
    },
    {
      "epoch": 1.799410029498525,
      "grad_norm": 0.04535715037949931,
      "learning_rate": 8.071063563448341e-06,
      "loss": 0.5131,
      "step": 305
    },
    {
      "epoch": 1.8289085545722714,
      "grad_norm": 0.045271495366352554,
      "learning_rate": 7.988862191016204e-06,
      "loss": 0.5123,
      "step": 310
    },
    {
      "epoch": 1.8584070796460177,
      "grad_norm": 0.056004335836578926,
      "learning_rate": 7.905384077009693e-06,
      "loss": 0.5097,
      "step": 315
    },
    {
      "epoch": 1.887905604719764,
      "grad_norm": 0.04430378987199138,
      "learning_rate": 7.820664880476257e-06,
      "loss": 0.5143,
      "step": 320
    },
    {
      "epoch": 1.9174041297935103,
      "grad_norm": 0.04609433007960605,
      "learning_rate": 7.734740790612137e-06,
      "loss": 0.5086,
      "step": 325
    },
    {
      "epoch": 1.9469026548672566,
      "grad_norm": 0.7820689144245092,
      "learning_rate": 7.647648511303545e-06,
      "loss": 0.5095,
      "step": 330
    },
    {
      "epoch": 1.976401179941003,
      "grad_norm": 0.049876695974305066,
      "learning_rate": 7.559425245448006e-06,
      "loss": 0.5116,
      "step": 335
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.569428026676178,
      "eval_runtime": 7.9223,
      "eval_samples_per_second": 16.662,
      "eval_steps_per_second": 0.379,
      "step": 339
    },
    {
      "epoch": 2.005899705014749,
      "grad_norm": 0.059860668705846505,
      "learning_rate": 7.470108679062521e-06,
      "loss": 0.5565,
      "step": 340
    },
    {
      "epoch": 2.0353982300884956,
      "grad_norm": 0.045373657400373905,
      "learning_rate": 7.379736965185369e-06,
      "loss": 0.5022,
      "step": 345
    },
    {
      "epoch": 2.0648967551622417,
      "grad_norm": 0.04575003134013503,
      "learning_rate": 7.288348707578409e-06,
      "loss": 0.5009,
      "step": 350
    },
    {
      "epoch": 2.094395280235988,
      "grad_norm": 0.0764963225778181,
      "learning_rate": 7.195982944236853e-06,
      "loss": 0.5023,
      "step": 355
    },
    {
      "epoch": 2.1238938053097347,
      "grad_norm": 0.2864046551111833,
      "learning_rate": 7.102679130713538e-06,
      "loss": 0.5027,
      "step": 360
    },
    {
      "epoch": 2.1533923303834808,
      "grad_norm": 0.0726950183868685,
      "learning_rate": 7.008477123264849e-06,
      "loss": 0.4995,
      "step": 365
    },
    {
      "epoch": 2.1828908554572273,
      "grad_norm": 0.047612550060808626,
      "learning_rate": 6.913417161825449e-06,
      "loss": 0.5073,
      "step": 370
    },
    {
      "epoch": 2.2123893805309733,
      "grad_norm": 0.045980323605959564,
      "learning_rate": 6.817539852819149e-06,
      "loss": 0.5066,
      "step": 375
    },
    {
      "epoch": 2.24188790560472,
      "grad_norm": 0.05019367448115624,
      "learning_rate": 6.720886151813194e-06,
      "loss": 0.5054,
      "step": 380
    },
    {
      "epoch": 2.271386430678466,
      "grad_norm": 0.04465323510528684,
      "learning_rate": 6.6234973460234184e-06,
      "loss": 0.5003,
      "step": 385
    },
    {
      "epoch": 2.3008849557522124,
      "grad_norm": 0.08235286259691993,
      "learning_rate": 6.525415036677745e-06,
      "loss": 0.5001,
      "step": 390
    },
    {
      "epoch": 2.330383480825959,
      "grad_norm": 0.050999382045359705,
      "learning_rate": 6.426681121245527e-06,
      "loss": 0.498,
      "step": 395
    },
    {
      "epoch": 2.359882005899705,
      "grad_norm": 0.04526036928138616,
      "learning_rate": 6.327337775540362e-06,
      "loss": 0.5018,
      "step": 400
    },
    {
      "epoch": 2.3893805309734515,
      "grad_norm": 0.05806925044679012,
      "learning_rate": 6.227427435703997e-06,
      "loss": 0.4955,
      "step": 405
    },
    {
      "epoch": 2.4188790560471976,
      "grad_norm": 0.06164688967940204,
      "learning_rate": 6.126992780079032e-06,
      "loss": 0.4971,
      "step": 410
    },
    {
      "epoch": 2.448377581120944,
      "grad_norm": 0.5078396005551373,
      "learning_rate": 6.026076710978172e-06,
      "loss": 0.5015,
      "step": 415
    },
    {
      "epoch": 2.47787610619469,
      "grad_norm": 0.046783560822131726,
      "learning_rate": 5.924722336357793e-06,
      "loss": 0.4975,
      "step": 420
    },
    {
      "epoch": 2.5073746312684366,
      "grad_norm": 0.04594136178012529,
      "learning_rate": 5.82297295140367e-06,
      "loss": 0.4996,
      "step": 425
    },
    {
      "epoch": 2.5368731563421827,
      "grad_norm": 0.05594049666474868,
      "learning_rate": 5.720872020036734e-06,
      "loss": 0.5001,
      "step": 430
    },
    {
      "epoch": 2.566371681415929,
      "grad_norm": 0.04465947789722648,
      "learning_rate": 5.61846315634674e-06,
      "loss": 0.4924,
      "step": 435
    },
    {
      "epoch": 2.5958702064896757,
      "grad_norm": 0.060580400184871375,
      "learning_rate": 5.515790105961785e-06,
      "loss": 0.5022,
      "step": 440
    },
    {
      "epoch": 2.6253687315634218,
      "grad_norm": 0.12381786538248248,
      "learning_rate": 5.412896727361663e-06,
      "loss": 0.4972,
      "step": 445
    },
    {
      "epoch": 2.6548672566371683,
      "grad_norm": 0.05202660251491312,
      "learning_rate": 5.309826973142974e-06,
      "loss": 0.5071,
      "step": 450
    },
    {
      "epoch": 2.6843657817109143,
      "grad_norm": 0.10377000214654016,
      "learning_rate": 5.206624871244066e-06,
      "loss": 0.4995,
      "step": 455
    },
    {
      "epoch": 2.713864306784661,
      "grad_norm": 0.044378951816130024,
      "learning_rate": 5.103334506137773e-06,
      "loss": 0.4974,
      "step": 460
    },
    {
      "epoch": 2.7433628318584073,
      "grad_norm": 0.04467225886658485,
      "learning_rate": 5e-06,
      "loss": 0.497,
      "step": 465
    },
    {
      "epoch": 2.7728613569321534,
      "grad_norm": 0.051986874546360556,
      "learning_rate": 4.89666549386223e-06,
      "loss": 0.4971,
      "step": 470
    },
    {
      "epoch": 2.8023598820058995,
      "grad_norm": 0.12453400877778924,
      "learning_rate": 4.793375128755934e-06,
      "loss": 0.4989,
      "step": 475
    },
    {
      "epoch": 2.831858407079646,
      "grad_norm": 0.04420192438109124,
      "learning_rate": 4.690173026857028e-06,
      "loss": 0.4965,
      "step": 480
    },
    {
      "epoch": 2.8613569321533925,
      "grad_norm": 0.043489134164912784,
      "learning_rate": 4.587103272638339e-06,
      "loss": 0.5001,
      "step": 485
    },
    {
      "epoch": 2.8908554572271385,
      "grad_norm": 0.057345889273731514,
      "learning_rate": 4.4842098940382155e-06,
      "loss": 0.4965,
      "step": 490
    },
    {
      "epoch": 2.920353982300885,
      "grad_norm": 0.04346872325174773,
      "learning_rate": 4.381536843653262e-06,
      "loss": 0.4973,
      "step": 495
    },
    {
      "epoch": 2.949852507374631,
      "grad_norm": 0.05768914593482109,
      "learning_rate": 4.279127979963266e-06,
      "loss": 0.5033,
      "step": 500
    },
    {
      "epoch": 2.9793510324483776,
      "grad_norm": 0.07469078383028477,
      "learning_rate": 4.17702704859633e-06,
      "loss": 0.4932,
      "step": 505
    },
    {
      "epoch": 2.9970501474926254,
      "eval_loss": 0.5568187236785889,
      "eval_runtime": 7.8687,
      "eval_samples_per_second": 16.775,
      "eval_steps_per_second": 0.381,
      "step": 508
    },
    {
      "epoch": 3.0088495575221237,
      "grad_norm": 0.2325348943012431,
      "learning_rate": 4.075277663642208e-06,
      "loss": 0.5464,
      "step": 510
    },
    {
      "epoch": 3.03834808259587,
      "grad_norm": 0.05925612576951284,
      "learning_rate": 3.973923289021829e-06,
      "loss": 0.4879,
      "step": 515
    },
    {
      "epoch": 3.0678466076696167,
      "grad_norm": 0.04576359473331283,
      "learning_rate": 3.8730072199209705e-06,
      "loss": 0.4958,
      "step": 520
    },
    {
      "epoch": 3.0973451327433628,
      "grad_norm": 0.04411225192878531,
      "learning_rate": 3.7725725642960047e-06,
      "loss": 0.4937,
      "step": 525
    },
    {
      "epoch": 3.1268436578171093,
      "grad_norm": 0.04681950811111515,
      "learning_rate": 3.67266222445964e-06,
      "loss": 0.497,
      "step": 530
    },
    {
      "epoch": 3.1563421828908553,
      "grad_norm": 0.05140593649320022,
      "learning_rate": 3.573318878754475e-06,
      "loss": 0.4908,
      "step": 535
    },
    {
      "epoch": 3.185840707964602,
      "grad_norm": 0.04848431413605644,
      "learning_rate": 3.4745849633222566e-06,
      "loss": 0.4907,
      "step": 540
    },
    {
      "epoch": 3.215339233038348,
      "grad_norm": 0.11169113540037036,
      "learning_rate": 3.3765026539765832e-06,
      "loss": 0.4954,
      "step": 545
    },
    {
      "epoch": 3.2448377581120944,
      "grad_norm": 0.045625351277697936,
      "learning_rate": 3.2791138481868084e-06,
      "loss": 0.4875,
      "step": 550
    },
    {
      "epoch": 3.274336283185841,
      "grad_norm": 0.04470601186507566,
      "learning_rate": 3.1824601471808504e-06,
      "loss": 0.4913,
      "step": 555
    },
    {
      "epoch": 3.303834808259587,
      "grad_norm": 0.04510846620481134,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 0.4977,
      "step": 560
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.04641841941445331,
      "learning_rate": 2.991522876735154e-06,
      "loss": 0.492,
      "step": 565
    },
    {
      "epoch": 3.3628318584070795,
      "grad_norm": 0.044008859582161636,
      "learning_rate": 2.8973208692864623e-06,
      "loss": 0.4927,
      "step": 570
    },
    {
      "epoch": 3.392330383480826,
      "grad_norm": 0.058212049337815264,
      "learning_rate": 2.804017055763149e-06,
      "loss": 0.4934,
      "step": 575
    },
    {
      "epoch": 3.421828908554572,
      "grad_norm": 0.04488763032020628,
      "learning_rate": 2.711651292421593e-06,
      "loss": 0.4896,
      "step": 580
    },
    {
      "epoch": 3.4513274336283186,
      "grad_norm": 0.06877731409673225,
      "learning_rate": 2.6202630348146323e-06,
      "loss": 0.4927,
      "step": 585
    },
    {
      "epoch": 3.4808259587020647,
      "grad_norm": 0.04405767793752617,
      "learning_rate": 2.529891320937481e-06,
      "loss": 0.4885,
      "step": 590
    },
    {
      "epoch": 3.510324483775811,
      "grad_norm": 0.046525497495949794,
      "learning_rate": 2.4405747545519966e-06,
      "loss": 0.493,
      "step": 595
    },
    {
      "epoch": 3.5398230088495577,
      "grad_norm": 0.04464423997103832,
      "learning_rate": 2.352351488696457e-06,
      "loss": 0.4871,
      "step": 600
    },
    {
      "epoch": 3.5693215339233038,
      "grad_norm": 0.04487754260652396,
      "learning_rate": 2.265259209387867e-06,
      "loss": 0.4897,
      "step": 605
    },
    {
      "epoch": 3.5988200589970503,
      "grad_norm": 0.06310865942039023,
      "learning_rate": 2.179335119523745e-06,
      "loss": 0.4929,
      "step": 610
    },
    {
      "epoch": 3.6283185840707963,
      "grad_norm": 0.04443946832576421,
      "learning_rate": 2.094615922990309e-06,
      "loss": 0.4938,
      "step": 615
    },
    {
      "epoch": 3.657817109144543,
      "grad_norm": 0.04515597574554685,
      "learning_rate": 2.0111378089837958e-06,
      "loss": 0.4882,
      "step": 620
    },
    {
      "epoch": 3.6873156342182893,
      "grad_norm": 0.04358107380856759,
      "learning_rate": 1.928936436551661e-06,
      "loss": 0.4903,
      "step": 625
    },
    {
      "epoch": 3.7168141592920354,
      "grad_norm": 0.045563864404630304,
      "learning_rate": 1.848046919360225e-06,
      "loss": 0.4921,
      "step": 630
    },
    {
      "epoch": 3.7463126843657815,
      "grad_norm": 0.04666731006378484,
      "learning_rate": 1.7685038106952952e-06,
      "loss": 0.4934,
      "step": 635
    },
    {
      "epoch": 3.775811209439528,
      "grad_norm": 0.07117810484547947,
      "learning_rate": 1.6903410887021676e-06,
      "loss": 0.4892,
      "step": 640
    },
    {
      "epoch": 3.8053097345132745,
      "grad_norm": 0.23100334767483371,
      "learning_rate": 1.6135921418712959e-06,
      "loss": 0.493,
      "step": 645
    },
    {
      "epoch": 3.8348082595870205,
      "grad_norm": 0.04506424951487985,
      "learning_rate": 1.5382897547758513e-06,
      "loss": 0.484,
      "step": 650
    },
    {
      "epoch": 3.864306784660767,
      "grad_norm": 0.10488547297270173,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.4877,
      "step": 655
    },
    {
      "epoch": 3.893805309734513,
      "grad_norm": 0.04390462723720627,
      "learning_rate": 1.3921526947346902e-06,
      "loss": 0.4887,
      "step": 660
    },
    {
      "epoch": 3.9233038348082596,
      "grad_norm": 0.059362742241561586,
      "learning_rate": 1.321380446634342e-06,
      "loss": 0.4943,
      "step": 665
    },
    {
      "epoch": 3.952802359882006,
      "grad_norm": 0.05424105447849983,
      "learning_rate": 1.2521795812943704e-06,
      "loss": 0.4871,
      "step": 670
    },
    {
      "epoch": 3.982300884955752,
      "grad_norm": 0.04260561612932512,
      "learning_rate": 1.1845796590009684e-06,
      "loss": 0.4874,
      "step": 675
    },
    {
      "epoch": 4.0,
      "eval_loss": 0.5515686869621277,
      "eval_runtime": 7.9174,
      "eval_samples_per_second": 16.672,
      "eval_steps_per_second": 0.379,
      "step": 678
    },
    {
      "epoch": 4.011799410029498,
      "grad_norm": 0.04414788535025088,
      "learning_rate": 1.118609556171213e-06,
      "loss": 0.535,
      "step": 680
    },
    {
      "epoch": 4.041297935103245,
      "grad_norm": 0.04732115155993842,
      "learning_rate": 1.0542974530180327e-06,
      "loss": 0.4859,
      "step": 685
    },
    {
      "epoch": 4.070796460176991,
      "grad_norm": 0.04337501922937707,
      "learning_rate": 9.916708215125586e-07,
      "loss": 0.4893,
      "step": 690
    },
    {
      "epoch": 4.100294985250738,
      "grad_norm": 0.052054539750301554,
      "learning_rate": 9.307564136490255e-07,
      "loss": 0.4837,
      "step": 695
    },
    {
      "epoch": 4.129793510324483,
      "grad_norm": 0.043254413851663974,
      "learning_rate": 8.715802500172215e-07,
      "loss": 0.4897,
      "step": 700
    },
    {
      "epoch": 4.15929203539823,
      "grad_norm": 0.05537638890929335,
      "learning_rate": 8.141676086873574e-07,
      "loss": 0.4893,
      "step": 705
    },
    {
      "epoch": 4.188790560471976,
      "grad_norm": 0.04519429234937821,
      "learning_rate": 7.585430144121319e-07,
      "loss": 0.4871,
      "step": 710
    },
    {
      "epoch": 4.218289085545723,
      "grad_norm": 0.09284032958781858,
      "learning_rate": 7.047302281505735e-07,
      "loss": 0.4829,
      "step": 715
    },
    {
      "epoch": 4.247787610619469,
      "grad_norm": 0.05759138824322539,
      "learning_rate": 6.527522369181655e-07,
      "loss": 0.4904,
      "step": 720
    },
    {
      "epoch": 4.277286135693215,
      "grad_norm": 0.04524068280213349,
      "learning_rate": 6.026312439675553e-07,
      "loss": 0.4841,
      "step": 725
    },
    {
      "epoch": 4.3067846607669615,
      "grad_norm": 0.04432058714882555,
      "learning_rate": 5.543886593040737e-07,
      "loss": 0.4843,
      "step": 730
    },
    {
      "epoch": 4.336283185840708,
      "grad_norm": 0.04232360342920672,
      "learning_rate": 5.080450905401057e-07,
      "loss": 0.4928,
      "step": 735
    },
    {
      "epoch": 4.3657817109144545,
      "grad_norm": 0.12702550471244367,
      "learning_rate": 4.6362033409220077e-07,
      "loss": 0.4951,
      "step": 740
    },
    {
      "epoch": 4.395280235988201,
      "grad_norm": 0.0473973137647136,
      "learning_rate": 4.211333667247125e-07,
      "loss": 0.4931,
      "step": 745
    },
    {
      "epoch": 4.424778761061947,
      "grad_norm": 0.043912927697138504,
      "learning_rate": 3.8060233744356634e-07,
      "loss": 0.4883,
      "step": 750
    },
    {
      "epoch": 4.454277286135693,
      "grad_norm": 0.04305758726287443,
      "learning_rate": 3.420445597436056e-07,
      "loss": 0.4877,
      "step": 755
    },
    {
      "epoch": 4.48377581120944,
      "grad_norm": 0.04286327782385863,
      "learning_rate": 3.0547650421285216e-07,
      "loss": 0.4865,
      "step": 760
    },
    {
      "epoch": 4.513274336283186,
      "grad_norm": 0.04387385477938106,
      "learning_rate": 2.7091379149682683e-07,
      "loss": 0.4891,
      "step": 765
    },
    {
      "epoch": 4.542772861356932,
      "grad_norm": 0.042851842666364756,
      "learning_rate": 2.3837118562592799e-07,
      "loss": 0.4886,
      "step": 770
    },
    {
      "epoch": 4.572271386430678,
      "grad_norm": 0.04267358478101285,
      "learning_rate": 2.0786258770873647e-07,
      "loss": 0.4896,
      "step": 775
    },
    {
      "epoch": 4.601769911504425,
      "grad_norm": 0.050416063074236905,
      "learning_rate": 1.7940102999393194e-07,
      "loss": 0.4875,
      "step": 780
    },
    {
      "epoch": 4.631268436578171,
      "grad_norm": 0.043881002529456536,
      "learning_rate": 1.5299867030334815e-07,
      "loss": 0.4854,
      "step": 785
    },
    {
      "epoch": 4.660766961651918,
      "grad_norm": 0.06742603559290723,
      "learning_rate": 1.286667868385627e-07,
      "loss": 0.4833,
      "step": 790
    },
    {
      "epoch": 4.6902654867256635,
      "grad_norm": 0.04689112762822426,
      "learning_rate": 1.0641577336322761e-07,
      "loss": 0.4859,
      "step": 795
    },
    {
      "epoch": 4.71976401179941,
      "grad_norm": 0.04458797008950871,
      "learning_rate": 8.625513476320291e-08,
      "loss": 0.486,
      "step": 800
    },
    {
      "epoch": 4.7492625368731565,
      "grad_norm": 0.043403401141790046,
      "learning_rate": 6.819348298638839e-08,
      "loss": 0.4835,
      "step": 805
    },
    {
      "epoch": 4.778761061946903,
      "grad_norm": 0.04203084473909298,
      "learning_rate": 5.223853336398632e-08,
      "loss": 0.4846,
      "step": 810
    },
    {
      "epoch": 4.808259587020649,
      "grad_norm": 0.046069003410869176,
      "learning_rate": 3.839710131477492e-08,
      "loss": 0.4887,
      "step": 815
    },
    {
      "epoch": 4.837758112094395,
      "grad_norm": 0.04757707619938308,
      "learning_rate": 2.6675099433787212e-08,
      "loss": 0.4878,
      "step": 820
    },
    {
      "epoch": 4.867256637168142,
      "grad_norm": 0.042268713033133586,
      "learning_rate": 1.7077534966650767e-08,
      "loss": 0.4882,
      "step": 825
    },
    {
      "epoch": 4.896755162241888,
      "grad_norm": 0.04302102116033562,
      "learning_rate": 9.608507670659239e-09,
      "loss": 0.4888,
      "step": 830
    },
    {
      "epoch": 4.926253687315635,
      "grad_norm": 0.07913283260437641,
      "learning_rate": 4.2712080634949024e-09,
      "loss": 0.4839,
      "step": 835
    },
    {
      "epoch": 4.95575221238938,
      "grad_norm": 0.04332335955644444,
      "learning_rate": 1.0679160603449533e-09,
      "loss": 0.4917,
      "step": 840
    },
    {
      "epoch": 4.985250737463127,
      "grad_norm": 0.04328053326566176,
      "learning_rate": 0.0,
      "loss": 0.4883,
      "step": 845
    }
  ],
  "logging_steps": 5,
  "max_steps": 845,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 56,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2830394185482240.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
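
The state above is the trainer_state.json that the Hugging Face Trainer writes alongside each checkpoint. As a minimal sketch of reading it back for inspection (the field names come directly from the state itself; the file path and the printed summary are illustrative assumptions, not part of the original artifact):

# Sketch: summarize a saved trainer_state.json.
# Assumes the JSON above is saved as "trainer_state.json" in the working directory.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Training log entries carry "loss"; evaluation entries carry "eval_loss" instead.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"{state['global_step']} steps over {state['epoch']:.3f} epochs")
for e in eval_logs:
    print(f"step {e['step']:>3}: eval_loss {e['eval_loss']:.4f}")
final = train_logs[-1]
print(f"final train loss {final['loss']:.4f} (lr {final['learning_rate']:.2e})")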