{ |
|
"best_metric": 0.9601866602897644, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-150", |
|
"epoch": 0.07561436672967864, |
|
"eval_steps": 50, |
|
"global_step": 150, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0005040957781978576, |
|
"grad_norm": 0.7851808667182922, |
|
"learning_rate": 1e-05, |
|
"loss": 0.9219, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0005040957781978576, |
|
"eval_loss": 1.1706854104995728, |
|
"eval_runtime": 70.2236, |
|
"eval_samples_per_second": 47.577, |
|
"eval_steps_per_second": 11.905, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0010081915563957152, |
|
"grad_norm": 0.8648802042007446, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0097, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0015122873345935729, |
|
"grad_norm": 0.731093168258667, |
|
"learning_rate": 3e-05, |
|
"loss": 0.9617, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0020163831127914303, |
|
"grad_norm": 0.6446400880813599, |
|
"learning_rate": 4e-05, |
|
"loss": 0.9963, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.002520478890989288, |
|
"grad_norm": 0.6305974125862122, |
|
"learning_rate": 5e-05, |
|
"loss": 0.956, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0030245746691871457, |
|
"grad_norm": 0.852928876876831, |
|
"learning_rate": 6e-05, |
|
"loss": 0.9513, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.003528670447385003, |
|
"grad_norm": 0.8347710371017456, |
|
"learning_rate": 7e-05, |
|
"loss": 0.9826, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.004032766225582861, |
|
"grad_norm": 0.7358830571174622, |
|
"learning_rate": 8e-05, |
|
"loss": 1.0078, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.004536862003780718, |
|
"grad_norm": 0.6131141185760498, |
|
"learning_rate": 9e-05, |
|
"loss": 0.966, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.005040957781978576, |
|
"grad_norm": 0.6628008484840393, |
|
"learning_rate": 0.0001, |
|
"loss": 1.0056, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.005545053560176433, |
|
"grad_norm": 0.6426183581352234, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 1.0338, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0060491493383742915, |
|
"grad_norm": 0.6265701651573181, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 0.9388, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.006553245116572149, |
|
"grad_norm": 0.6738749146461487, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 0.9911, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.007057340894770006, |
|
"grad_norm": 0.6448192000389099, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 0.9823, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.007561436672967864, |
|
"grad_norm": 0.5898854732513428, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 0.9779, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.008065532451165721, |
|
"grad_norm": 0.5653546452522278, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 0.9173, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.00856962822936358, |
|
"grad_norm": 0.5700259804725647, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 0.9083, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.009073724007561436, |
|
"grad_norm": 0.5963847041130066, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 1.0042, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.009577819785759294, |
|
"grad_norm": 0.5476916432380676, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 0.9457, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.010081915563957152, |
|
"grad_norm": 0.5964701771736145, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 0.9946, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01058601134215501, |
|
"grad_norm": 0.6130715012550354, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 0.9332, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.011090107120352867, |
|
"grad_norm": 0.6350668668746948, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 1.0227, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.011594202898550725, |
|
"grad_norm": 0.6542335152626038, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 0.9471, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.012098298676748583, |
|
"grad_norm": 0.624995768070221, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 0.9539, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01260239445494644, |
|
"grad_norm": 0.6840269565582275, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 0.9486, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.013106490233144297, |
|
"grad_norm": 0.7418709397315979, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 0.9635, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.013610586011342156, |
|
"grad_norm": 0.7369248867034912, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 1.035, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.014114681789540012, |
|
"grad_norm": 0.6638664603233337, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 0.8968, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.01461877756773787, |
|
"grad_norm": 0.741362988948822, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.9482, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.015122873345935728, |
|
"grad_norm": 0.8223165273666382, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 1.0101, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.015626969124133586, |
|
"grad_norm": 0.8499884009361267, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 1.0493, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.016131064902331443, |
|
"grad_norm": 0.8594983816146851, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 1.0085, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0166351606805293, |
|
"grad_norm": 1.0113023519515991, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 0.939, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.01713925645872716, |
|
"grad_norm": 1.016897439956665, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 1.1272, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.017643352236925015, |
|
"grad_norm": 1.1077070236206055, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 1.1256, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.018147448015122872, |
|
"grad_norm": 1.234421968460083, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 1.0987, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.01865154379332073, |
|
"grad_norm": 1.2606217861175537, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 0.9812, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.019155639571518588, |
|
"grad_norm": 1.360200047492981, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 0.9274, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.019659735349716444, |
|
"grad_norm": 1.617262840270996, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 1.08, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.020163831127914304, |
|
"grad_norm": 1.4929728507995605, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 0.9906, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02066792690611216, |
|
"grad_norm": 1.6736871004104614, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 1.114, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.02117202268431002, |
|
"grad_norm": 1.995395541191101, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 0.8387, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.021676118462507877, |
|
"grad_norm": 2.214477777481079, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 1.1417, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.022180214240705733, |
|
"grad_norm": 2.147244453430176, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 1.1272, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.022684310018903593, |
|
"grad_norm": 2.3361599445343018, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 1.1843, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02318840579710145, |
|
"grad_norm": 2.324315071105957, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 1.234, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.023692501575299306, |
|
"grad_norm": 2.6563589572906494, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 1.0599, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.024196597353497166, |
|
"grad_norm": 3.113487482070923, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 1.5183, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.024700693131695022, |
|
"grad_norm": 3.193916082382202, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 1.2843, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.02520478890989288, |
|
"grad_norm": 4.234517574310303, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 1.4933, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02520478890989288, |
|
"eval_loss": 1.1050190925598145, |
|
"eval_runtime": 70.0275, |
|
"eval_samples_per_second": 47.71, |
|
"eval_steps_per_second": 11.938, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02570888468809074, |
|
"grad_norm": 1.292781949043274, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 0.9901, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.026212980466288595, |
|
"grad_norm": 1.1206916570663452, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 1.0152, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.02671707624448645, |
|
"grad_norm": 0.8595021367073059, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 0.9863, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.02722117202268431, |
|
"grad_norm": 0.6013736724853516, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 1.0146, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.027725267800882167, |
|
"grad_norm": 0.5559196472167969, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 0.9531, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.028229363579080024, |
|
"grad_norm": 0.5038899779319763, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 0.9519, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.028733459357277884, |
|
"grad_norm": 0.44719502329826355, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 0.9766, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.02923755513547574, |
|
"grad_norm": 0.4539150297641754, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 0.9853, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.029741650913673597, |
|
"grad_norm": 0.45345962047576904, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 0.9229, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.030245746691871456, |
|
"grad_norm": 0.4743633568286896, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 0.9564, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.030749842470069313, |
|
"grad_norm": 0.484157532453537, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 0.9582, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.03125393824826717, |
|
"grad_norm": 0.44355064630508423, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 0.9991, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.03175803402646503, |
|
"grad_norm": 0.40092796087265015, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 0.915, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.032262129804662885, |
|
"grad_norm": 0.3976602554321289, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 0.891, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.03276622558286074, |
|
"grad_norm": 0.41375917196273804, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 0.9286, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0332703213610586, |
|
"grad_norm": 0.4368148148059845, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 0.9392, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.03377441713925646, |
|
"grad_norm": 0.4674438536167145, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.9399, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.03427851291745432, |
|
"grad_norm": 0.4300023913383484, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 0.9015, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.034782608695652174, |
|
"grad_norm": 0.44563958048820496, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 0.9133, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.03528670447385003, |
|
"grad_norm": 0.4508240222930908, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 0.9124, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03579080025204789, |
|
"grad_norm": 0.48746979236602783, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 0.9495, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.036294896030245744, |
|
"grad_norm": 0.5286091566085815, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 0.9482, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.03679899180844361, |
|
"grad_norm": 0.5450381636619568, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 0.9255, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.03730308758664146, |
|
"grad_norm": 0.5480132102966309, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 0.9612, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.03780718336483932, |
|
"grad_norm": 0.5725929737091064, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 1.0166, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.038311279143037176, |
|
"grad_norm": 0.5540945529937744, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 0.9842, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.03881537492123503, |
|
"grad_norm": 0.593117356300354, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 0.9666, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.03931947069943289, |
|
"grad_norm": 0.7941222190856934, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 0.9603, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.03982356647763075, |
|
"grad_norm": 0.6581109166145325, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 0.9778, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.04032766225582861, |
|
"grad_norm": 0.707141101360321, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 1.0769, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.040831758034026465, |
|
"grad_norm": 0.7098904252052307, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 1.0253, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.04133585381222432, |
|
"grad_norm": 0.725770115852356, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 0.9254, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.04183994959042218, |
|
"grad_norm": 0.8920672535896301, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 0.9655, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.04234404536862004, |
|
"grad_norm": 0.8527565002441406, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 0.924, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.0428481411468179, |
|
"grad_norm": 0.9886060357093811, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 0.8559, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.043352236925015754, |
|
"grad_norm": 0.9404958486557007, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.7445, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.04385633270321361, |
|
"grad_norm": 1.1955063343048096, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 1.0882, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.04436042848141147, |
|
"grad_norm": 1.2329496145248413, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 0.8986, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.04486452425960932, |
|
"grad_norm": 1.3696224689483643, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 1.0774, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.045368620037807186, |
|
"grad_norm": 1.3210687637329102, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 0.8344, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04587271581600504, |
|
"grad_norm": 1.2450342178344727, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 0.9926, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.0463768115942029, |
|
"grad_norm": 1.536271572113037, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 0.9797, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.046880907372400756, |
|
"grad_norm": 1.8610961437225342, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 1.1009, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.04738500315059861, |
|
"grad_norm": 1.8540403842926025, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 1.1389, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.04788909892879647, |
|
"grad_norm": 2.1709673404693604, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 0.9783, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04839319470699433, |
|
"grad_norm": 2.641937494277954, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 1.1573, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.04889729048519219, |
|
"grad_norm": 2.51130747795105, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 1.1294, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.049401386263390044, |
|
"grad_norm": 2.792402505874634, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 1.3566, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.0499054820415879, |
|
"grad_norm": 3.161710023880005, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 1.4144, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.05040957781978576, |
|
"grad_norm": 6.212632656097412, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 1.7334, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05040957781978576, |
|
"eval_loss": 1.0116993188858032, |
|
"eval_runtime": 70.2364, |
|
"eval_samples_per_second": 47.568, |
|
"eval_steps_per_second": 11.903, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.050913673597983614, |
|
"grad_norm": 0.7841265797615051, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 0.8478, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.05141776937618148, |
|
"grad_norm": 0.7918402552604675, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 0.9741, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.05192186515437933, |
|
"grad_norm": 0.7663191556930542, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 1.061, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.05242596093257719, |
|
"grad_norm": 0.5820767879486084, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 0.9389, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.052930056710775046, |
|
"grad_norm": 0.4415889084339142, |
|
"learning_rate": 5e-05, |
|
"loss": 0.9897, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.0534341524889729, |
|
"grad_norm": 0.39681074023246765, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 0.9521, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.053938248267170766, |
|
"grad_norm": 0.40437906980514526, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 0.9214, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.05444234404536862, |
|
"grad_norm": 0.3815537095069885, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 0.94, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.05494643982356648, |
|
"grad_norm": 0.38426071405410767, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 0.9144, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.055450535601764335, |
|
"grad_norm": 0.38166847825050354, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 0.9569, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.05595463137996219, |
|
"grad_norm": 0.3796461224555969, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 0.9856, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.05645872715816005, |
|
"grad_norm": 0.3643120527267456, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 0.9217, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.05696282293635791, |
|
"grad_norm": 0.384927362203598, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 0.911, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.05746691871455577, |
|
"grad_norm": 0.38762205839157104, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 0.8977, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.057971014492753624, |
|
"grad_norm": 0.3947840929031372, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 0.9479, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.05847511027095148, |
|
"grad_norm": 0.40712770819664, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 0.9162, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.05897920604914934, |
|
"grad_norm": 0.41037410497665405, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 0.9168, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.05948330182734719, |
|
"grad_norm": 0.41304683685302734, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 0.8944, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.059987397605545056, |
|
"grad_norm": 0.4369380474090576, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 0.9477, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.06049149338374291, |
|
"grad_norm": 0.4551554322242737, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 0.9252, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06099558916194077, |
|
"grad_norm": 0.46176278591156006, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 0.9468, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.061499684940138626, |
|
"grad_norm": 0.47067078948020935, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 0.9505, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.06200378071833648, |
|
"grad_norm": 0.4589996635913849, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 0.918, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.06250787649653435, |
|
"grad_norm": 0.47660380601882935, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.958, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.0630119722747322, |
|
"grad_norm": 0.48685434460639954, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 1.0218, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.06351606805293006, |
|
"grad_norm": 0.4912755787372589, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 0.9211, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.06402016383112791, |
|
"grad_norm": 0.5366457104682922, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 0.978, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.06452425960932577, |
|
"grad_norm": 0.5451323390007019, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 0.9386, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.06502835538752363, |
|
"grad_norm": 0.5842207074165344, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 1.0105, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.06553245116572148, |
|
"grad_norm": 0.555545449256897, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 0.9662, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.06603654694391935, |
|
"grad_norm": 0.6450053453445435, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 1.0536, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.0665406427221172, |
|
"grad_norm": 0.6146291494369507, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 0.8666, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.06704473850031506, |
|
"grad_norm": 0.6321956515312195, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 0.9245, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.06754883427851292, |
|
"grad_norm": 0.7116584181785583, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 0.8686, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.06805293005671077, |
|
"grad_norm": 0.8101832866668701, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 0.8388, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.06855702583490864, |
|
"grad_norm": 0.8933461904525757, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 0.9507, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.06906112161310649, |
|
"grad_norm": 0.9112444519996643, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 0.8868, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.06956521739130435, |
|
"grad_norm": 1.024316668510437, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 1.1932, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.07006931316950221, |
|
"grad_norm": 1.1063545942306519, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 1.0144, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.07057340894770006, |
|
"grad_norm": 1.094598412513733, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 0.9647, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07107750472589792, |
|
"grad_norm": 1.3245705366134644, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 1.092, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.07158160050409577, |
|
"grad_norm": 1.3400907516479492, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 0.9654, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.07208569628229364, |
|
"grad_norm": 1.3648507595062256, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.8774, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.07258979206049149, |
|
"grad_norm": 1.505003571510315, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 1.0001, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.07309388783868935, |
|
"grad_norm": 1.6857004165649414, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 0.9833, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.07359798361688721, |
|
"grad_norm": 1.766623616218567, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 0.9873, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.07410207939508506, |
|
"grad_norm": 2.1166927814483643, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 1.1829, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.07460617517328293, |
|
"grad_norm": 3.463778495788574, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 1.3177, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.07511027095148078, |
|
"grad_norm": 3.5200185775756836, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 1.236, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.07561436672967864, |
|
"grad_norm": 4.4810872077941895, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 1.4402, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.07561436672967864, |
|
"eval_loss": 0.9601866602897644, |
|
"eval_runtime": 70.1677, |
|
"eval_samples_per_second": 47.614, |
|
"eval_steps_per_second": 11.914, |
|
"step": 150 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.4239728880582656e+16, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |